diff --git a/.core_files.yaml b/.core_files.yaml index 067a6a2b41d..08cabb71164 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -49,6 +49,7 @@ base_platforms: &base_platforms - homeassistant/components/tts/** - homeassistant/components/update/** - homeassistant/components/vacuum/** + - homeassistant/components/valve/** - homeassistant/components/water_heater/** - homeassistant/components/weather/** diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 99a48360b41..00000000000 --- a/.coveragerc +++ /dev/null @@ -1,1733 +0,0 @@ -# Sorted by hassfest. -# -# To sort, run python3 -m script.hassfest -p coverage - -[run] -source = homeassistant -omit = - homeassistant/__main__.py - homeassistant/helpers/backports/aiohttp_resolver.py - homeassistant/helpers/signal.py - homeassistant/scripts/__init__.py - homeassistant/scripts/benchmark/__init__.py - homeassistant/scripts/check_config.py - homeassistant/scripts/ensure_config.py - homeassistant/scripts/macos/__init__.py - - # omit pieces of code that rely on external devices being present - homeassistant/components/acer_projector/* - homeassistant/components/acmeda/__init__.py - homeassistant/components/acmeda/base.py - homeassistant/components/acmeda/cover.py - homeassistant/components/acmeda/errors.py - homeassistant/components/acmeda/helpers.py - homeassistant/components/acmeda/hub.py - homeassistant/components/acmeda/sensor.py - homeassistant/components/actiontec/const.py - homeassistant/components/actiontec/device_tracker.py - homeassistant/components/actiontec/model.py - homeassistant/components/adax/__init__.py - homeassistant/components/adax/climate.py - homeassistant/components/adguard/__init__.py - homeassistant/components/adguard/entity.py - homeassistant/components/adguard/sensor.py - homeassistant/components/adguard/switch.py - homeassistant/components/ads/* - homeassistant/components/aftership/__init__.py - homeassistant/components/aftership/sensor.py - homeassistant/components/agent_dvr/alarm_control_panel.py - homeassistant/components/agent_dvr/camera.py - homeassistant/components/agent_dvr/helpers.py - homeassistant/components/airnow/__init__.py - homeassistant/components/airnow/coordinator.py - homeassistant/components/airnow/sensor.py - homeassistant/components/airq/__init__.py - homeassistant/components/airq/coordinator.py - homeassistant/components/airq/sensor.py - homeassistant/components/airthings/__init__.py - homeassistant/components/airthings/sensor.py - homeassistant/components/airthings_ble/__init__.py - homeassistant/components/airthings_ble/sensor.py - homeassistant/components/airtouch4/__init__.py - homeassistant/components/airtouch4/climate.py - homeassistant/components/airtouch4/coordinator.py - homeassistant/components/airtouch5/__init__.py - homeassistant/components/airtouch5/climate.py - homeassistant/components/airtouch5/entity.py - homeassistant/components/airvisual/__init__.py - homeassistant/components/airvisual/sensor.py - homeassistant/components/airvisual_pro/__init__.py - homeassistant/components/airvisual_pro/sensor.py - homeassistant/components/alarmdecoder/__init__.py - homeassistant/components/alarmdecoder/alarm_control_panel.py - homeassistant/components/alarmdecoder/binary_sensor.py - homeassistant/components/alarmdecoder/entity.py - homeassistant/components/alarmdecoder/sensor.py - homeassistant/components/alpha_vantage/sensor.py - homeassistant/components/amazon_polly/* - homeassistant/components/ambient_station/__init__.py - 
homeassistant/components/ambient_station/binary_sensor.py - homeassistant/components/ambient_station/entity.py - homeassistant/components/ambient_station/sensor.py - homeassistant/components/amcrest/* - homeassistant/components/ampio/* - homeassistant/components/android_ip_webcam/switch.py - homeassistant/components/anel_pwrctrl/switch.py - homeassistant/components/anthemav/media_player.py - homeassistant/components/apple_tv/__init__.py - homeassistant/components/apple_tv/browse_media.py - homeassistant/components/apple_tv/media_player.py - homeassistant/components/apple_tv/remote.py - homeassistant/components/aprilaire/__init__.py - homeassistant/components/aprilaire/climate.py - homeassistant/components/aprilaire/coordinator.py - homeassistant/components/aprilaire/entity.py - homeassistant/components/aprilaire/select.py - homeassistant/components/aprilaire/sensor.py - homeassistant/components/apsystems/__init__.py - homeassistant/components/apsystems/coordinator.py - homeassistant/components/apsystems/entity.py - homeassistant/components/apsystems/number.py - homeassistant/components/apsystems/sensor.py - homeassistant/components/aqualogic/* - homeassistant/components/aquostv/media_player.py - homeassistant/components/arcam_fmj/__init__.py - homeassistant/components/arcam_fmj/media_player.py - homeassistant/components/arest/binary_sensor.py - homeassistant/components/arest/sensor.py - homeassistant/components/arest/switch.py - homeassistant/components/arris_tg2492lg/* - homeassistant/components/aruba/device_tracker.py - homeassistant/components/arwn/sensor.py - homeassistant/components/aseko_pool_live/__init__.py - homeassistant/components/aseko_pool_live/binary_sensor.py - homeassistant/components/aseko_pool_live/coordinator.py - homeassistant/components/aseko_pool_live/entity.py - homeassistant/components/aseko_pool_live/sensor.py - homeassistant/components/asterisk_cdr/mailbox.py - homeassistant/components/asterisk_mbox/mailbox.py - homeassistant/components/aten_pe/* - homeassistant/components/atome/* - homeassistant/components/aurora/__init__.py - homeassistant/components/aurora/binary_sensor.py - homeassistant/components/aurora/coordinator.py - homeassistant/components/aurora/entity.py - homeassistant/components/aurora/sensor.py - homeassistant/components/avea/light.py - homeassistant/components/avion/light.py - homeassistant/components/awair/coordinator.py - homeassistant/components/azure_service_bus/* - homeassistant/components/baf/__init__.py - homeassistant/components/baf/binary_sensor.py - homeassistant/components/baf/climate.py - homeassistant/components/baf/entity.py - homeassistant/components/baf/fan.py - homeassistant/components/baf/light.py - homeassistant/components/baf/number.py - homeassistant/components/baf/sensor.py - homeassistant/components/baf/switch.py - homeassistant/components/baidu/tts.py - homeassistant/components/bang_olufsen/entity.py - homeassistant/components/bang_olufsen/media_player.py - homeassistant/components/bang_olufsen/util.py - homeassistant/components/bang_olufsen/websocket.py - homeassistant/components/bbox/device_tracker.py - homeassistant/components/bbox/sensor.py - homeassistant/components/beewi_smartclim/sensor.py - homeassistant/components/bitcoin/sensor.py - homeassistant/components/bizkaibus/sensor.py - homeassistant/components/blink/__init__.py - homeassistant/components/blink/alarm_control_panel.py - homeassistant/components/blink/binary_sensor.py - homeassistant/components/blink/camera.py - homeassistant/components/blink/sensor.py - 
homeassistant/components/blink/switch.py - homeassistant/components/blinksticklight/light.py - homeassistant/components/blockchain/sensor.py - homeassistant/components/bloomsky/* - homeassistant/components/bluesound/* - homeassistant/components/bluetooth_tracker/* - homeassistant/components/bmw_connected_drive/notify.py - homeassistant/components/bosch_shc/__init__.py - homeassistant/components/bosch_shc/binary_sensor.py - homeassistant/components/bosch_shc/cover.py - homeassistant/components/bosch_shc/entity.py - homeassistant/components/bosch_shc/sensor.py - homeassistant/components/bosch_shc/switch.py - homeassistant/components/braviatv/button.py - homeassistant/components/braviatv/coordinator.py - homeassistant/components/braviatv/media_player.py - homeassistant/components/braviatv/remote.py - homeassistant/components/bring/coordinator.py - homeassistant/components/bring/todo.py - homeassistant/components/broadlink/climate.py - homeassistant/components/broadlink/light.py - homeassistant/components/broadlink/remote.py - homeassistant/components/broadlink/switch.py - homeassistant/components/broadlink/updater.py - homeassistant/components/brottsplatskartan/sensor.py - homeassistant/components/browser/* - homeassistant/components/brunt/__init__.py - homeassistant/components/brunt/cover.py - homeassistant/components/bsblan/climate.py - homeassistant/components/bt_home_hub_5/device_tracker.py - homeassistant/components/bt_smarthub/device_tracker.py - homeassistant/components/buienradar/sensor.py - homeassistant/components/buienradar/util.py - homeassistant/components/buienradar/weather.py - homeassistant/components/canary/camera.py - homeassistant/components/cert_expiry/helper.py - homeassistant/components/channels/* - homeassistant/components/cisco_ios/device_tracker.py - homeassistant/components/cisco_mobility_express/device_tracker.py - homeassistant/components/cisco_webex_teams/notify.py - homeassistant/components/citybikes/sensor.py - homeassistant/components/clementine/media_player.py - homeassistant/components/clickatell/notify.py - homeassistant/components/clicksend/notify.py - homeassistant/components/clicksend_tts/notify.py - homeassistant/components/cmus/media_player.py - homeassistant/components/coinbase/sensor.py - homeassistant/components/comed_hourly_pricing/sensor.py - homeassistant/components/comelit/__init__.py - homeassistant/components/comelit/alarm_control_panel.py - homeassistant/components/comelit/climate.py - homeassistant/components/comelit/coordinator.py - homeassistant/components/comelit/cover.py - homeassistant/components/comelit/humidifier.py - homeassistant/components/comelit/light.py - homeassistant/components/comelit/sensor.py - homeassistant/components/comelit/switch.py - homeassistant/components/comfoconnect/fan.py - homeassistant/components/concord232/alarm_control_panel.py - homeassistant/components/concord232/binary_sensor.py - homeassistant/components/control4/__init__.py - homeassistant/components/control4/director_utils.py - homeassistant/components/control4/light.py - homeassistant/components/control4/media_player.py - homeassistant/components/coolmaster/coordinator.py - homeassistant/components/cppm_tracker/device_tracker.py - homeassistant/components/crownstone/__init__.py - homeassistant/components/crownstone/devices.py - homeassistant/components/crownstone/entry_manager.py - homeassistant/components/crownstone/helpers.py - homeassistant/components/crownstone/light.py - homeassistant/components/crownstone/listeners.py - 
homeassistant/components/cups/sensor.py - homeassistant/components/currencylayer/sensor.py - homeassistant/components/daikin/climate.py - homeassistant/components/daikin/sensor.py - homeassistant/components/daikin/switch.py - homeassistant/components/danfoss_air/* - homeassistant/components/ddwrt/device_tracker.py - homeassistant/components/decora/light.py - homeassistant/components/decora_wifi/light.py - homeassistant/components/delijn/* - homeassistant/components/deluge/__init__.py - homeassistant/components/deluge/coordinator.py - homeassistant/components/deluge/sensor.py - homeassistant/components/deluge/switch.py - homeassistant/components/denon/media_player.py - homeassistant/components/denonavr/__init__.py - homeassistant/components/denonavr/media_player.py - homeassistant/components/denonavr/receiver.py - homeassistant/components/digital_ocean/* - homeassistant/components/discogs/sensor.py - homeassistant/components/discord/__init__.py - homeassistant/components/discord/notify.py - homeassistant/components/dlib_face_detect/image_processing.py - homeassistant/components/dlib_face_identify/image_processing.py - homeassistant/components/dlink/data.py - homeassistant/components/dominos/* - homeassistant/components/doods/* - homeassistant/components/doorbird/__init__.py - homeassistant/components/doorbird/button.py - homeassistant/components/doorbird/camera.py - homeassistant/components/doorbird/device.py - homeassistant/components/doorbird/entity.py - homeassistant/components/doorbird/util.py - homeassistant/components/doorbird/view.py - homeassistant/components/dormakaba_dkey/__init__.py - homeassistant/components/dormakaba_dkey/binary_sensor.py - homeassistant/components/dormakaba_dkey/entity.py - homeassistant/components/dormakaba_dkey/lock.py - homeassistant/components/dormakaba_dkey/sensor.py - homeassistant/components/dovado/* - homeassistant/components/downloader/__init__.py - homeassistant/components/dte_energy_bridge/sensor.py - homeassistant/components/dublin_bus_transport/sensor.py - homeassistant/components/dunehd/__init__.py - homeassistant/components/dunehd/media_player.py - homeassistant/components/duotecno/__init__.py - homeassistant/components/duotecno/binary_sensor.py - homeassistant/components/duotecno/climate.py - homeassistant/components/duotecno/cover.py - homeassistant/components/duotecno/entity.py - homeassistant/components/duotecno/light.py - homeassistant/components/duotecno/switch.py - homeassistant/components/dwd_weather_warnings/coordinator.py - homeassistant/components/dwd_weather_warnings/sensor.py - homeassistant/components/dweet/* - homeassistant/components/ebox/sensor.py - homeassistant/components/ebusd/* - homeassistant/components/ecoal_boiler/* - homeassistant/components/ecobee/__init__.py - homeassistant/components/ecobee/binary_sensor.py - homeassistant/components/ecobee/climate.py - homeassistant/components/ecobee/notify.py - homeassistant/components/ecobee/sensor.py - homeassistant/components/ecobee/weather.py - homeassistant/components/ecoforest/__init__.py - homeassistant/components/ecoforest/coordinator.py - homeassistant/components/ecoforest/entity.py - homeassistant/components/ecoforest/number.py - homeassistant/components/ecoforest/sensor.py - homeassistant/components/ecoforest/switch.py - homeassistant/components/econet/__init__.py - homeassistant/components/econet/binary_sensor.py - homeassistant/components/econet/climate.py - homeassistant/components/econet/sensor.py - homeassistant/components/econet/water_heater.py - 
homeassistant/components/ecovacs/controller.py - homeassistant/components/ecovacs/entity.py - homeassistant/components/ecovacs/image.py - homeassistant/components/ecovacs/number.py - homeassistant/components/ecovacs/util.py - homeassistant/components/ecovacs/vacuum.py - homeassistant/components/ecowitt/__init__.py - homeassistant/components/ecowitt/binary_sensor.py - homeassistant/components/ecowitt/entity.py - homeassistant/components/ecowitt/sensor.py - homeassistant/components/eddystone_temperature/sensor.py - homeassistant/components/edimax/switch.py - homeassistant/components/edl21/__init__.py - homeassistant/components/edl21/sensor.py - homeassistant/components/egardia/* - homeassistant/components/electrasmart/__init__.py - homeassistant/components/electrasmart/climate.py - homeassistant/components/electric_kiwi/__init__.py - homeassistant/components/electric_kiwi/api.py - homeassistant/components/electric_kiwi/coordinator.py - homeassistant/components/electric_kiwi/oauth2.py - homeassistant/components/electric_kiwi/select.py - homeassistant/components/eliqonline/sensor.py - homeassistant/components/elkm1/__init__.py - homeassistant/components/elkm1/alarm_control_panel.py - homeassistant/components/elkm1/binary_sensor.py - homeassistant/components/elkm1/climate.py - homeassistant/components/elkm1/light.py - homeassistant/components/elkm1/sensor.py - homeassistant/components/elkm1/switch.py - homeassistant/components/elmax/__init__.py - homeassistant/components/elmax/alarm_control_panel.py - homeassistant/components/elmax/binary_sensor.py - homeassistant/components/elmax/coordinator.py - homeassistant/components/elmax/cover.py - homeassistant/components/elmax/switch.py - homeassistant/components/elv/* - homeassistant/components/elvia/__init__.py - homeassistant/components/elvia/importer.py - homeassistant/components/emby/media_player.py - homeassistant/components/emoncms/sensor.py - homeassistant/components/emoncms_history/* - homeassistant/components/emonitor/__init__.py - homeassistant/components/emonitor/sensor.py - homeassistant/components/enigma2/media_player.py - homeassistant/components/enocean/__init__.py - homeassistant/components/enocean/binary_sensor.py - homeassistant/components/enocean/device.py - homeassistant/components/enocean/dongle.py - homeassistant/components/enocean/light.py - homeassistant/components/enocean/sensor.py - homeassistant/components/enocean/switch.py - homeassistant/components/enphase_envoy/__init__.py - homeassistant/components/enphase_envoy/binary_sensor.py - homeassistant/components/enphase_envoy/coordinator.py - homeassistant/components/enphase_envoy/entity.py - homeassistant/components/enphase_envoy/number.py - homeassistant/components/enphase_envoy/select.py - homeassistant/components/enphase_envoy/sensor.py - homeassistant/components/enphase_envoy/switch.py - homeassistant/components/entur_public_transport/* - homeassistant/components/environment_canada/__init__.py - homeassistant/components/environment_canada/camera.py - homeassistant/components/environment_canada/sensor.py - homeassistant/components/environment_canada/weather.py - homeassistant/components/envisalink/* - homeassistant/components/ephember/climate.py - homeassistant/components/epic_games_store/__init__.py - homeassistant/components/epic_games_store/coordinator.py - homeassistant/components/epion/__init__.py - homeassistant/components/epion/coordinator.py - homeassistant/components/epion/sensor.py - homeassistant/components/epson/__init__.py - 
homeassistant/components/epson/media_player.py - homeassistant/components/eq3btsmart/__init__.py - homeassistant/components/eq3btsmart/climate.py - homeassistant/components/eq3btsmart/entity.py - homeassistant/components/eq3btsmart/models.py - homeassistant/components/escea/__init__.py - homeassistant/components/escea/climate.py - homeassistant/components/escea/discovery.py - homeassistant/components/etherscan/sensor.py - homeassistant/components/eufy/* - homeassistant/components/eufylife_ble/__init__.py - homeassistant/components/eufylife_ble/sensor.py - homeassistant/components/everlights/light.py - homeassistant/components/evohome/* - homeassistant/components/ezviz/__init__.py - homeassistant/components/ezviz/alarm_control_panel.py - homeassistant/components/ezviz/binary_sensor.py - homeassistant/components/ezviz/button.py - homeassistant/components/ezviz/camera.py - homeassistant/components/ezviz/coordinator.py - homeassistant/components/ezviz/entity.py - homeassistant/components/ezviz/image.py - homeassistant/components/ezviz/light.py - homeassistant/components/ezviz/number.py - homeassistant/components/ezviz/select.py - homeassistant/components/ezviz/sensor.py - homeassistant/components/ezviz/siren.py - homeassistant/components/ezviz/switch.py - homeassistant/components/ezviz/update.py - homeassistant/components/faa_delays/__init__.py - homeassistant/components/faa_delays/binary_sensor.py - homeassistant/components/faa_delays/coordinator.py - homeassistant/components/familyhub/camera.py - homeassistant/components/ffmpeg/camera.py - homeassistant/components/fibaro/__init__.py - homeassistant/components/fibaro/binary_sensor.py - homeassistant/components/fibaro/climate.py - homeassistant/components/fibaro/cover.py - homeassistant/components/fibaro/event.py - homeassistant/components/fibaro/light.py - homeassistant/components/fibaro/lock.py - homeassistant/components/fibaro/sensor.py - homeassistant/components/fibaro/switch.py - homeassistant/components/fints/sensor.py - homeassistant/components/fireservicerota/__init__.py - homeassistant/components/fireservicerota/binary_sensor.py - homeassistant/components/fireservicerota/sensor.py - homeassistant/components/fireservicerota/switch.py - homeassistant/components/firmata/__init__.py - homeassistant/components/firmata/binary_sensor.py - homeassistant/components/firmata/board.py - homeassistant/components/firmata/entity.py - homeassistant/components/firmata/light.py - homeassistant/components/firmata/pin.py - homeassistant/components/firmata/sensor.py - homeassistant/components/firmata/switch.py - homeassistant/components/fivem/__init__.py - homeassistant/components/fivem/binary_sensor.py - homeassistant/components/fivem/coordinator.py - homeassistant/components/fivem/entity.py - homeassistant/components/fivem/sensor.py - homeassistant/components/fixer/sensor.py - homeassistant/components/fjaraskupan/__init__.py - homeassistant/components/fjaraskupan/binary_sensor.py - homeassistant/components/fjaraskupan/coordinator.py - homeassistant/components/fjaraskupan/fan.py - homeassistant/components/fjaraskupan/light.py - homeassistant/components/fjaraskupan/number.py - homeassistant/components/fjaraskupan/sensor.py - homeassistant/components/fleetgo/device_tracker.py - homeassistant/components/flexit/climate.py - homeassistant/components/flexit_bacnet/climate.py - homeassistant/components/flic/binary_sensor.py - homeassistant/components/flick_electric/__init__.py - homeassistant/components/flick_electric/sensor.py - 
homeassistant/components/flock/notify.py - homeassistant/components/flume/__init__.py - homeassistant/components/flume/binary_sensor.py - homeassistant/components/flume/coordinator.py - homeassistant/components/flume/entity.py - homeassistant/components/flume/sensor.py - homeassistant/components/flume/util.py - homeassistant/components/folder_watcher/__init__.py - homeassistant/components/foobot/sensor.py - homeassistant/components/fortios/device_tracker.py - homeassistant/components/foscam/__init__.py - homeassistant/components/foscam/camera.py - homeassistant/components/foscam/coordinator.py - homeassistant/components/foscam/entity.py - homeassistant/components/foursquare/* - homeassistant/components/free_mobile/notify.py - homeassistant/components/freebox/camera.py - homeassistant/components/freebox/home_base.py - homeassistant/components/freebox/switch.py - homeassistant/components/fritz/coordinator.py - homeassistant/components/fritz/entity.py - homeassistant/components/fritz/services.py - homeassistant/components/fritz/switch.py - homeassistant/components/fritzbox_callmonitor/__init__.py - homeassistant/components/fritzbox_callmonitor/base.py - homeassistant/components/fritzbox_callmonitor/sensor.py - homeassistant/components/frontier_silicon/__init__.py - homeassistant/components/frontier_silicon/browse_media.py - homeassistant/components/frontier_silicon/media_player.py - homeassistant/components/futurenow/light.py - homeassistant/components/garadget/cover.py - homeassistant/components/garages_amsterdam/__init__.py - homeassistant/components/garages_amsterdam/binary_sensor.py - homeassistant/components/garages_amsterdam/entity.py - homeassistant/components/garages_amsterdam/sensor.py - homeassistant/components/gc100/* - homeassistant/components/geniushub/* - homeassistant/components/geocaching/__init__.py - homeassistant/components/geocaching/coordinator.py - homeassistant/components/geocaching/oauth.py - homeassistant/components/geocaching/sensor.py - homeassistant/components/github/coordinator.py - homeassistant/components/gitlab_ci/sensor.py - homeassistant/components/gitter/sensor.py - homeassistant/components/glances/sensor.py - homeassistant/components/goodwe/__init__.py - homeassistant/components/goodwe/button.py - homeassistant/components/goodwe/coordinator.py - homeassistant/components/goodwe/number.py - homeassistant/components/goodwe/select.py - homeassistant/components/goodwe/sensor.py - homeassistant/components/google_cloud/tts.py - homeassistant/components/google_maps/device_tracker.py - homeassistant/components/google_pubsub/__init__.py - homeassistant/components/gpsd/__init__.py - homeassistant/components/gpsd/sensor.py - homeassistant/components/greenwave/light.py - homeassistant/components/growatt_server/__init__.py - homeassistant/components/growatt_server/sensor.py - homeassistant/components/growatt_server/sensor_types/* - homeassistant/components/gstreamer/media_player.py - homeassistant/components/gtfs/sensor.py - homeassistant/components/guardian/__init__.py - homeassistant/components/guardian/binary_sensor.py - homeassistant/components/guardian/button.py - homeassistant/components/guardian/coordinator.py - homeassistant/components/guardian/sensor.py - homeassistant/components/guardian/switch.py - homeassistant/components/guardian/util.py - homeassistant/components/guardian/valve.py - homeassistant/components/habitica/__init__.py - homeassistant/components/habitica/coordinator.py - homeassistant/components/habitica/sensor.py - 
homeassistant/components/harman_kardon_avr/media_player.py - homeassistant/components/harmony/data.py - homeassistant/components/harmony/remote.py - homeassistant/components/harmony/util.py - homeassistant/components/haveibeenpwned/sensor.py - homeassistant/components/heatmiser/climate.py - homeassistant/components/hikvision/binary_sensor.py - homeassistant/components/hikvisioncam/switch.py - homeassistant/components/hisense_aehw4a1/__init__.py - homeassistant/components/hisense_aehw4a1/climate.py - homeassistant/components/hitron_coda/device_tracker.py - homeassistant/components/hive/__init__.py - homeassistant/components/hive/alarm_control_panel.py - homeassistant/components/hive/binary_sensor.py - homeassistant/components/hive/climate.py - homeassistant/components/hive/light.py - homeassistant/components/hive/sensor.py - homeassistant/components/hive/switch.py - homeassistant/components/hive/water_heater.py - homeassistant/components/hko/__init__.py - homeassistant/components/hko/coordinator.py - homeassistant/components/hko/weather.py - homeassistant/components/hlk_sw16/__init__.py - homeassistant/components/hlk_sw16/switch.py - homeassistant/components/home_connect/entity.py - homeassistant/components/home_connect/light.py - homeassistant/components/home_connect/switch.py - homeassistant/components/homematic/__init__.py - homeassistant/components/homematic/binary_sensor.py - homeassistant/components/homematic/climate.py - homeassistant/components/homematic/cover.py - homeassistant/components/homematic/entity.py - homeassistant/components/homematic/light.py - homeassistant/components/homematic/lock.py - homeassistant/components/homematic/notify.py - homeassistant/components/homematic/sensor.py - homeassistant/components/homematic/switch.py - homeassistant/components/horizon/media_player.py - homeassistant/components/hp_ilo/sensor.py - homeassistant/components/huawei_lte/__init__.py - homeassistant/components/huawei_lte/binary_sensor.py - homeassistant/components/huawei_lte/device_tracker.py - homeassistant/components/huawei_lte/notify.py - homeassistant/components/huawei_lte/sensor.py - homeassistant/components/huawei_lte/switch.py - homeassistant/components/hunterdouglas_powerview/__init__.py - homeassistant/components/hunterdouglas_powerview/button.py - homeassistant/components/hunterdouglas_powerview/coordinator.py - homeassistant/components/hunterdouglas_powerview/cover.py - homeassistant/components/hunterdouglas_powerview/entity.py - homeassistant/components/hunterdouglas_powerview/number.py - homeassistant/components/hunterdouglas_powerview/select.py - homeassistant/components/hunterdouglas_powerview/sensor.py - homeassistant/components/hunterdouglas_powerview/shade_data.py - homeassistant/components/hunterdouglas_powerview/util.py - homeassistant/components/huum/__init__.py - homeassistant/components/huum/climate.py - homeassistant/components/hvv_departures/__init__.py - homeassistant/components/hvv_departures/binary_sensor.py - homeassistant/components/hvv_departures/sensor.py - homeassistant/components/ialarm/alarm_control_panel.py - homeassistant/components/iammeter/const.py - homeassistant/components/iammeter/sensor.py - homeassistant/components/iaqualink/binary_sensor.py - homeassistant/components/iaqualink/climate.py - homeassistant/components/iaqualink/light.py - homeassistant/components/iaqualink/sensor.py - homeassistant/components/iaqualink/switch.py - homeassistant/components/icloud/__init__.py - homeassistant/components/icloud/account.py - 
homeassistant/components/icloud/device_tracker.py - homeassistant/components/icloud/sensor.py - homeassistant/components/idteck_prox/* - homeassistant/components/ifttt/__init__.py - homeassistant/components/ifttt/alarm_control_panel.py - homeassistant/components/iglo/light.py - homeassistant/components/ihc/* - homeassistant/components/incomfort/__init__.py - homeassistant/components/incomfort/climate.py - homeassistant/components/incomfort/water_heater.py - homeassistant/components/insteon/binary_sensor.py - homeassistant/components/insteon/climate.py - homeassistant/components/insteon/cover.py - homeassistant/components/insteon/fan.py - homeassistant/components/insteon/insteon_entity.py - homeassistant/components/insteon/light.py - homeassistant/components/insteon/schemas.py - homeassistant/components/insteon/switch.py - homeassistant/components/insteon/utils.py - homeassistant/components/intellifire/__init__.py - homeassistant/components/intellifire/binary_sensor.py - homeassistant/components/intellifire/climate.py - homeassistant/components/intellifire/coordinator.py - homeassistant/components/intellifire/entity.py - homeassistant/components/intellifire/fan.py - homeassistant/components/intellifire/light.py - homeassistant/components/intellifire/number.py - homeassistant/components/intellifire/sensor.py - homeassistant/components/intellifire/switch.py - homeassistant/components/intesishome/* - homeassistant/components/ios/__init__.py - homeassistant/components/ios/notify.py - homeassistant/components/ios/sensor.py - homeassistant/components/iperf3/* - homeassistant/components/iqvia/__init__.py - homeassistant/components/iqvia/sensor.py - homeassistant/components/irish_rail_transport/sensor.py - homeassistant/components/iss/__init__.py - homeassistant/components/iss/sensor.py - homeassistant/components/ista_ecotrend/coordinator.py - homeassistant/components/isy994/__init__.py - homeassistant/components/isy994/binary_sensor.py - homeassistant/components/isy994/button.py - homeassistant/components/isy994/climate.py - homeassistant/components/isy994/cover.py - homeassistant/components/isy994/entity.py - homeassistant/components/isy994/fan.py - homeassistant/components/isy994/helpers.py - homeassistant/components/isy994/light.py - homeassistant/components/isy994/lock.py - homeassistant/components/isy994/models.py - homeassistant/components/isy994/number.py - homeassistant/components/isy994/select.py - homeassistant/components/isy994/sensor.py - homeassistant/components/isy994/services.py - homeassistant/components/isy994/switch.py - homeassistant/components/isy994/util.py - homeassistant/components/itach/remote.py - homeassistant/components/itunes/media_player.py - homeassistant/components/izone/__init__.py - homeassistant/components/izone/climate.py - homeassistant/components/izone/discovery.py - homeassistant/components/joaoapps_join/* - homeassistant/components/juicenet/__init__.py - homeassistant/components/juicenet/device.py - homeassistant/components/juicenet/entity.py - homeassistant/components/juicenet/number.py - homeassistant/components/juicenet/sensor.py - homeassistant/components/juicenet/switch.py - homeassistant/components/justnimbus/coordinator.py - homeassistant/components/justnimbus/entity.py - homeassistant/components/justnimbus/sensor.py - homeassistant/components/kaiterra/* - homeassistant/components/kankun/switch.py - homeassistant/components/keba/* - homeassistant/components/keenetic_ndms2/__init__.py - homeassistant/components/keenetic_ndms2/binary_sensor.py - 
homeassistant/components/keenetic_ndms2/device_tracker.py - homeassistant/components/keenetic_ndms2/router.py - homeassistant/components/kef/* - homeassistant/components/keyboard/* - homeassistant/components/keyboard_remote/* - homeassistant/components/keymitt_ble/__init__.py - homeassistant/components/keymitt_ble/coordinator.py - homeassistant/components/keymitt_ble/entity.py - homeassistant/components/keymitt_ble/switch.py - homeassistant/components/kitchen_sink/weather.py - homeassistant/components/kiwi/lock.py - homeassistant/components/kodi/__init__.py - homeassistant/components/kodi/browse_media.py - homeassistant/components/kodi/media_player.py - homeassistant/components/kodi/notify.py - homeassistant/components/konnected/__init__.py - homeassistant/components/konnected/panel.py - homeassistant/components/konnected/switch.py - homeassistant/components/kostal_plenticore/__init__.py - homeassistant/components/kostal_plenticore/coordinator.py - homeassistant/components/kostal_plenticore/helper.py - homeassistant/components/kostal_plenticore/select.py - homeassistant/components/kostal_plenticore/sensor.py - homeassistant/components/kostal_plenticore/switch.py - homeassistant/components/kwb/sensor.py - homeassistant/components/lacrosse/sensor.py - homeassistant/components/lannouncer/notify.py - homeassistant/components/launch_library/__init__.py - homeassistant/components/launch_library/sensor.py - homeassistant/components/lcn/climate.py - homeassistant/components/lcn/helpers.py - homeassistant/components/lcn/services.py - homeassistant/components/ld2410_ble/__init__.py - homeassistant/components/ld2410_ble/binary_sensor.py - homeassistant/components/ld2410_ble/coordinator.py - homeassistant/components/ld2410_ble/sensor.py - homeassistant/components/led_ble/__init__.py - homeassistant/components/led_ble/light.py - homeassistant/components/lg_netcast/media_player.py - homeassistant/components/lg_soundbar/__init__.py - homeassistant/components/lg_soundbar/media_player.py - homeassistant/components/lightwave/* - homeassistant/components/limitlessled/light.py - homeassistant/components/linksys_smart/device_tracker.py - homeassistant/components/linode/* - homeassistant/components/linux_battery/sensor.py - homeassistant/components/lirc/* - homeassistant/components/livisi/__init__.py - homeassistant/components/livisi/binary_sensor.py - homeassistant/components/livisi/climate.py - homeassistant/components/livisi/coordinator.py - homeassistant/components/livisi/entity.py - homeassistant/components/livisi/switch.py - homeassistant/components/llamalab_automate/notify.py - homeassistant/components/logi_circle/__init__.py - homeassistant/components/logi_circle/camera.py - homeassistant/components/logi_circle/sensor.py - homeassistant/components/london_underground/sensor.py - homeassistant/components/lookin/__init__.py - homeassistant/components/lookin/climate.py - homeassistant/components/lookin/coordinator.py - homeassistant/components/lookin/entity.py - homeassistant/components/lookin/light.py - homeassistant/components/lookin/media_player.py - homeassistant/components/lookin/sensor.py - homeassistant/components/loqed/sensor.py - homeassistant/components/luci/device_tracker.py - homeassistant/components/lupusec/__init__.py - homeassistant/components/lupusec/alarm_control_panel.py - homeassistant/components/lupusec/binary_sensor.py - homeassistant/components/lupusec/entity.py - homeassistant/components/lupusec/switch.py - homeassistant/components/lutron/__init__.py - 
homeassistant/components/lutron/binary_sensor.py - homeassistant/components/lutron/cover.py - homeassistant/components/lutron/entity.py - homeassistant/components/lutron/event.py - homeassistant/components/lutron/fan.py - homeassistant/components/lutron/light.py - homeassistant/components/lutron/switch.py - homeassistant/components/lutron_caseta/__init__.py - homeassistant/components/lutron_caseta/binary_sensor.py - homeassistant/components/lutron_caseta/cover.py - homeassistant/components/lutron_caseta/fan.py - homeassistant/components/lutron_caseta/light.py - homeassistant/components/lutron_caseta/switch.py - homeassistant/components/lw12wifi/light.py - homeassistant/components/lyric/__init__.py - homeassistant/components/lyric/api.py - homeassistant/components/lyric/climate.py - homeassistant/components/lyric/sensor.py - homeassistant/components/mailgun/notify.py - homeassistant/components/mastodon/notify.py - homeassistant/components/matrix/__init__.py - homeassistant/components/matrix/notify.py - homeassistant/components/matter/__init__.py - homeassistant/components/matter/fan.py - homeassistant/components/meater/__init__.py - homeassistant/components/meater/sensor.py - homeassistant/components/medcom_ble/__init__.py - homeassistant/components/medcom_ble/sensor.py - homeassistant/components/mediaroom/media_player.py - homeassistant/components/melcloud/__init__.py - homeassistant/components/melcloud/climate.py - homeassistant/components/melcloud/sensor.py - homeassistant/components/melcloud/water_heater.py - homeassistant/components/melnor/__init__.py - homeassistant/components/message_bird/notify.py - homeassistant/components/met/weather.py - homeassistant/components/met_eireann/__init__.py - homeassistant/components/met_eireann/weather.py - homeassistant/components/meteo_france/__init__.py - homeassistant/components/meteo_france/sensor.py - homeassistant/components/meteo_france/weather.py - homeassistant/components/meteoalarm/* - homeassistant/components/meteoclimatic/__init__.py - homeassistant/components/meteoclimatic/sensor.py - homeassistant/components/meteoclimatic/weather.py - homeassistant/components/microbees/__init__.py - homeassistant/components/microbees/api.py - homeassistant/components/microbees/application_credentials.py - homeassistant/components/microbees/binary_sensor.py - homeassistant/components/microbees/button.py - homeassistant/components/microbees/climate.py - homeassistant/components/microbees/coordinator.py - homeassistant/components/microbees/cover.py - homeassistant/components/microbees/entity.py - homeassistant/components/microbees/light.py - homeassistant/components/microbees/sensor.py - homeassistant/components/microbees/switch.py - homeassistant/components/microsoft/tts.py - homeassistant/components/mikrotik/coordinator.py - homeassistant/components/mill/climate.py - homeassistant/components/mill/sensor.py - homeassistant/components/minio/minio_helper.py - homeassistant/components/mjpeg/camera.py - homeassistant/components/mjpeg/util.py - homeassistant/components/mochad/__init__.py - homeassistant/components/mochad/light.py - homeassistant/components/mochad/switch.py - homeassistant/components/modem_callerid/button.py - homeassistant/components/modem_callerid/sensor.py - homeassistant/components/moehlenhoff_alpha2/climate.py - homeassistant/components/moehlenhoff_alpha2/coordinator.py - homeassistant/components/monzo/__init__.py - homeassistant/components/monzo/api.py - homeassistant/components/motion_blinds/__init__.py - 
homeassistant/components/motion_blinds/coordinator.py - homeassistant/components/motion_blinds/cover.py - homeassistant/components/motion_blinds/entity.py - homeassistant/components/motion_blinds/sensor.py - homeassistant/components/motionblinds_ble/__init__.py - homeassistant/components/motionblinds_ble/button.py - homeassistant/components/motionblinds_ble/cover.py - homeassistant/components/motionblinds_ble/entity.py - homeassistant/components/motionblinds_ble/select.py - homeassistant/components/motionblinds_ble/sensor.py - homeassistant/components/motionmount/__init__.py - homeassistant/components/motionmount/binary_sensor.py - homeassistant/components/motionmount/entity.py - homeassistant/components/motionmount/number.py - homeassistant/components/motionmount/select.py - homeassistant/components/motionmount/sensor.py - homeassistant/components/mpd/media_player.py - homeassistant/components/mqtt_room/sensor.py - homeassistant/components/msteams/notify.py - homeassistant/components/mullvad/__init__.py - homeassistant/components/mullvad/binary_sensor.py - homeassistant/components/mutesync/__init__.py - homeassistant/components/mutesync/binary_sensor.py - homeassistant/components/mvglive/sensor.py - homeassistant/components/mycroft/* - homeassistant/components/mysensors/__init__.py - homeassistant/components/mysensors/climate.py - homeassistant/components/mysensors/cover.py - homeassistant/components/mysensors/gateway.py - homeassistant/components/mysensors/handler.py - homeassistant/components/mysensors/helpers.py - homeassistant/components/mysensors/light.py - homeassistant/components/mysensors/switch.py - homeassistant/components/mystrom/binary_sensor.py - homeassistant/components/mystrom/light.py - homeassistant/components/mystrom/sensor.py - homeassistant/components/mystrom/switch.py - homeassistant/components/myuplink/__init__.py - homeassistant/components/myuplink/api.py - homeassistant/components/myuplink/application_credentials.py - homeassistant/components/myuplink/coordinator.py - homeassistant/components/myuplink/entity.py - homeassistant/components/myuplink/helpers.py - homeassistant/components/myuplink/sensor.py - homeassistant/components/nad/media_player.py - homeassistant/components/nanoleaf/__init__.py - homeassistant/components/nanoleaf/button.py - homeassistant/components/nanoleaf/coordinator.py - homeassistant/components/nanoleaf/entity.py - homeassistant/components/nanoleaf/event.py - homeassistant/components/nanoleaf/light.py - homeassistant/components/neato/__init__.py - homeassistant/components/neato/api.py - homeassistant/components/neato/button.py - homeassistant/components/neato/camera.py - homeassistant/components/neato/entity.py - homeassistant/components/neato/hub.py - homeassistant/components/neato/sensor.py - homeassistant/components/neato/switch.py - homeassistant/components/neato/vacuum.py - homeassistant/components/nederlandse_spoorwegen/sensor.py - homeassistant/components/netdata/sensor.py - homeassistant/components/netgear/__init__.py - homeassistant/components/netgear/button.py - homeassistant/components/netgear/device_tracker.py - homeassistant/components/netgear/entity.py - homeassistant/components/netgear/router.py - homeassistant/components/netgear/sensor.py - homeassistant/components/netgear/switch.py - homeassistant/components/netgear/update.py - homeassistant/components/netgear_lte/__init__.py - homeassistant/components/netgear_lte/notify.py - homeassistant/components/netio/switch.py - homeassistant/components/neurio_energy/sensor.py - 
homeassistant/components/nexia/climate.py - homeassistant/components/nexia/entity.py - homeassistant/components/nexia/switch.py - homeassistant/components/nextcloud/__init__.py - homeassistant/components/nextcloud/binary_sensor.py - homeassistant/components/nextcloud/coordinator.py - homeassistant/components/nextcloud/entity.py - homeassistant/components/nextcloud/sensor.py - homeassistant/components/nextcloud/update.py - homeassistant/components/nfandroidtv/__init__.py - homeassistant/components/nfandroidtv/notify.py - homeassistant/components/nibe_heatpump/__init__.py - homeassistant/components/nibe_heatpump/binary_sensor.py - homeassistant/components/nibe_heatpump/select.py - homeassistant/components/nibe_heatpump/sensor.py - homeassistant/components/nibe_heatpump/switch.py - homeassistant/components/nibe_heatpump/water_heater.py - homeassistant/components/niko_home_control/light.py - homeassistant/components/nilu/air_quality.py - homeassistant/components/nissan_leaf/* - homeassistant/components/nmap_tracker/__init__.py - homeassistant/components/nmap_tracker/device_tracker.py - homeassistant/components/nmbs/sensor.py - homeassistant/components/noaa_tides/sensor.py - homeassistant/components/nobo_hub/__init__.py - homeassistant/components/nobo_hub/climate.py - homeassistant/components/nobo_hub/select.py - homeassistant/components/nobo_hub/sensor.py - homeassistant/components/norway_air/air_quality.py - homeassistant/components/notify_events/notify.py - homeassistant/components/notion/__init__.py - homeassistant/components/notion/binary_sensor.py - homeassistant/components/notion/coordinator.py - homeassistant/components/notion/sensor.py - homeassistant/components/notion/util.py - homeassistant/components/nsw_fuel_station/sensor.py - homeassistant/components/nuki/__init__.py - homeassistant/components/nuki/coordinator.py - homeassistant/components/nuki/lock.py - homeassistant/components/nx584/alarm_control_panel.py - homeassistant/components/oasa_telematics/sensor.py - homeassistant/components/obihai/__init__.py - homeassistant/components/obihai/button.py - homeassistant/components/obihai/connectivity.py - homeassistant/components/obihai/sensor.py - homeassistant/components/octoprint/__init__.py - homeassistant/components/octoprint/coordinator.py - homeassistant/components/oem/climate.py - homeassistant/components/ohmconnect/sensor.py - homeassistant/components/ombi/* - homeassistant/components/omnilogic/__init__.py - homeassistant/components/omnilogic/coordinator.py - homeassistant/components/omnilogic/sensor.py - homeassistant/components/omnilogic/switch.py - homeassistant/components/ondilo_ico/__init__.py - homeassistant/components/ondilo_ico/api.py - homeassistant/components/ondilo_ico/coordinator.py - homeassistant/components/ondilo_ico/sensor.py - homeassistant/components/onkyo/media_player.py - homeassistant/components/onvif/__init__.py - homeassistant/components/onvif/binary_sensor.py - homeassistant/components/onvif/camera.py - homeassistant/components/onvif/device.py - homeassistant/components/onvif/event.py - homeassistant/components/onvif/parsers.py - homeassistant/components/onvif/sensor.py - homeassistant/components/onvif/util.py - homeassistant/components/open_meteo/weather.py - homeassistant/components/openevse/sensor.py - homeassistant/components/openexchangerates/__init__.py - homeassistant/components/openexchangerates/coordinator.py - homeassistant/components/openexchangerates/sensor.py - homeassistant/components/opengarage/__init__.py - 
homeassistant/components/opengarage/binary_sensor.py - homeassistant/components/opengarage/cover.py - homeassistant/components/opengarage/entity.py - homeassistant/components/opengarage/sensor.py - homeassistant/components/openhardwaremonitor/sensor.py - homeassistant/components/openhome/__init__.py - homeassistant/components/openhome/media_player.py - homeassistant/components/opensensemap/air_quality.py - homeassistant/components/opentherm_gw/__init__.py - homeassistant/components/opentherm_gw/binary_sensor.py - homeassistant/components/opentherm_gw/climate.py - homeassistant/components/opentherm_gw/sensor.py - homeassistant/components/openuv/__init__.py - homeassistant/components/openuv/binary_sensor.py - homeassistant/components/openuv/coordinator.py - homeassistant/components/openuv/sensor.py - homeassistant/components/openweathermap/__init__.py - homeassistant/components/openweathermap/coordinator.py - homeassistant/components/openweathermap/repairs.py - homeassistant/components/openweathermap/sensor.py - homeassistant/components/openweathermap/weather.py - homeassistant/components/opnsense/__init__.py - homeassistant/components/opnsense/device_tracker.py - homeassistant/components/opower/__init__.py - homeassistant/components/opower/coordinator.py - homeassistant/components/opower/sensor.py - homeassistant/components/opple/light.py - homeassistant/components/oru/* - homeassistant/components/orvibo/switch.py - homeassistant/components/osoenergy/__init__.py - homeassistant/components/osoenergy/binary_sensor.py - homeassistant/components/osoenergy/entity.py - homeassistant/components/osoenergy/sensor.py - homeassistant/components/osoenergy/water_heater.py - homeassistant/components/osramlightify/light.py - homeassistant/components/otp/sensor.py - homeassistant/components/overkiz/__init__.py - homeassistant/components/overkiz/alarm_control_panel.py - homeassistant/components/overkiz/binary_sensor.py - homeassistant/components/overkiz/button.py - homeassistant/components/overkiz/climate.py - homeassistant/components/overkiz/climate_entities/* - homeassistant/components/overkiz/coordinator.py - homeassistant/components/overkiz/cover.py - homeassistant/components/overkiz/cover_entities/* - homeassistant/components/overkiz/entity.py - homeassistant/components/overkiz/executor.py - homeassistant/components/overkiz/light.py - homeassistant/components/overkiz/lock.py - homeassistant/components/overkiz/number.py - homeassistant/components/overkiz/select.py - homeassistant/components/overkiz/sensor.py - homeassistant/components/overkiz/siren.py - homeassistant/components/overkiz/switch.py - homeassistant/components/overkiz/water_heater.py - homeassistant/components/overkiz/water_heater_entities/* - homeassistant/components/ovo_energy/__init__.py - homeassistant/components/ovo_energy/sensor.py - homeassistant/components/panasonic_bluray/media_player.py - homeassistant/components/panasonic_viera/media_player.py - homeassistant/components/pandora/media_player.py - homeassistant/components/pencom/switch.py - homeassistant/components/permobil/__init__.py - homeassistant/components/permobil/binary_sensor.py - homeassistant/components/permobil/coordinator.py - homeassistant/components/permobil/entity.py - homeassistant/components/permobil/sensor.py - homeassistant/components/philips_js/__init__.py - homeassistant/components/philips_js/coordinator.py - homeassistant/components/philips_js/light.py - homeassistant/components/philips_js/media_player.py - homeassistant/components/philips_js/remote.py - 
homeassistant/components/philips_js/switch.py - homeassistant/components/pi_hole/sensor.py - homeassistant/components/picotts/tts.py - homeassistant/components/pilight/base_class.py - homeassistant/components/pilight/binary_sensor.py - homeassistant/components/pilight/light.py - homeassistant/components/pilight/switch.py - homeassistant/components/ping/__init__.py - homeassistant/components/ping/helpers.py - homeassistant/components/pioneer/media_player.py - homeassistant/components/plaato/__init__.py - homeassistant/components/plaato/binary_sensor.py - homeassistant/components/plaato/entity.py - homeassistant/components/plaato/sensor.py - homeassistant/components/plex/cast.py - homeassistant/components/plex/media_player.py - homeassistant/components/plex/view.py - homeassistant/components/plum_lightpad/light.py - homeassistant/components/pocketcasts/sensor.py - homeassistant/components/point/__init__.py - homeassistant/components/point/alarm_control_panel.py - homeassistant/components/point/binary_sensor.py - homeassistant/components/point/sensor.py - homeassistant/components/powerwall/__init__.py - homeassistant/components/progettihwsw/__init__.py - homeassistant/components/progettihwsw/binary_sensor.py - homeassistant/components/progettihwsw/switch.py - homeassistant/components/proliphix/climate.py - homeassistant/components/prowl/notify.py - homeassistant/components/proxmoxve/* - homeassistant/components/proxy/camera.py - homeassistant/components/pulseaudio_loopback/switch.py - homeassistant/components/purpleair/coordinator.py - homeassistant/components/pushbullet/api.py - homeassistant/components/pushbullet/notify.py - homeassistant/components/pushbullet/sensor.py - homeassistant/components/pushover/notify.py - homeassistant/components/pushsafer/notify.py - homeassistant/components/qbittorrent/__init__.py - homeassistant/components/qbittorrent/coordinator.py - homeassistant/components/qbittorrent/sensor.py - homeassistant/components/qnap/__init__.py - homeassistant/components/qnap/coordinator.py - homeassistant/components/qnap/sensor.py - homeassistant/components/qrcode/image_processing.py - homeassistant/components/quantum_gateway/device_tracker.py - homeassistant/components/qvr_pro/* - homeassistant/components/rabbitair/__init__.py - homeassistant/components/rabbitair/coordinator.py - homeassistant/components/rabbitair/entity.py - homeassistant/components/rabbitair/fan.py - homeassistant/components/rachio/__init__.py - homeassistant/components/rachio/binary_sensor.py - homeassistant/components/rachio/coordinator.py - homeassistant/components/rachio/device.py - homeassistant/components/rachio/entity.py - homeassistant/components/rachio/switch.py - homeassistant/components/rachio/webhooks.py - homeassistant/components/radio_browser/__init__.py - homeassistant/components/radiotherm/__init__.py - homeassistant/components/radiotherm/climate.py - homeassistant/components/radiotherm/coordinator.py - homeassistant/components/radiotherm/data.py - homeassistant/components/radiotherm/entity.py - homeassistant/components/radiotherm/switch.py - homeassistant/components/radiotherm/util.py - homeassistant/components/raincloud/* - homeassistant/components/rainmachine/__init__.py - homeassistant/components/rainmachine/binary_sensor.py - homeassistant/components/rainmachine/button.py - homeassistant/components/rainmachine/coordinator.py - homeassistant/components/rainmachine/select.py - homeassistant/components/rainmachine/sensor.py - homeassistant/components/rainmachine/switch.py - 
homeassistant/components/rainmachine/update.py - homeassistant/components/rainmachine/util.py - homeassistant/components/raspyrfm/* - homeassistant/components/recollect_waste/sensor.py - homeassistant/components/recorder/repack.py - homeassistant/components/recswitch/switch.py - homeassistant/components/reddit/sensor.py - homeassistant/components/refoss/__init__.py - homeassistant/components/refoss/bridge.py - homeassistant/components/refoss/coordinator.py - homeassistant/components/refoss/entity.py - homeassistant/components/refoss/sensor.py - homeassistant/components/refoss/switch.py - homeassistant/components/refoss/util.py - homeassistant/components/rejseplanen/sensor.py - homeassistant/components/remember_the_milk/__init__.py - homeassistant/components/remote_rpi_gpio/* - homeassistant/components/renson/__init__.py - homeassistant/components/renson/binary_sensor.py - homeassistant/components/renson/button.py - homeassistant/components/renson/coordinator.py - homeassistant/components/renson/entity.py - homeassistant/components/renson/fan.py - homeassistant/components/renson/number.py - homeassistant/components/renson/sensor.py - homeassistant/components/renson/switch.py - homeassistant/components/renson/time.py - homeassistant/components/reolink/binary_sensor.py - homeassistant/components/reolink/button.py - homeassistant/components/reolink/camera.py - homeassistant/components/reolink/entity.py - homeassistant/components/reolink/host.py - homeassistant/components/reolink/light.py - homeassistant/components/reolink/number.py - homeassistant/components/reolink/select.py - homeassistant/components/reolink/sensor.py - homeassistant/components/reolink/siren.py - homeassistant/components/reolink/switch.py - homeassistant/components/reolink/update.py - homeassistant/components/repetier/__init__.py - homeassistant/components/repetier/sensor.py - homeassistant/components/rest/notify.py - homeassistant/components/rest/switch.py - homeassistant/components/ridwell/__init__.py - homeassistant/components/ridwell/calendar.py - homeassistant/components/ridwell/coordinator.py - homeassistant/components/ridwell/switch.py - homeassistant/components/ring/camera.py - homeassistant/components/ripple/sensor.py - homeassistant/components/roborock/coordinator.py - homeassistant/components/rocketchat/notify.py - homeassistant/components/romy/__init__.py - homeassistant/components/romy/binary_sensor.py - homeassistant/components/romy/coordinator.py - homeassistant/components/romy/entity.py - homeassistant/components/romy/sensor.py - homeassistant/components/romy/vacuum.py - homeassistant/components/roomba/__init__.py - homeassistant/components/roomba/binary_sensor.py - homeassistant/components/roomba/braava.py - homeassistant/components/roomba/irobot_base.py - homeassistant/components/roomba/roomba.py - homeassistant/components/roomba/sensor.py - homeassistant/components/roomba/vacuum.py - homeassistant/components/roon/__init__.py - homeassistant/components/roon/event.py - homeassistant/components/roon/media_browser.py - homeassistant/components/roon/media_player.py - homeassistant/components/roon/server.py - homeassistant/components/route53/* - homeassistant/components/rpi_camera/* - homeassistant/components/rtorrent/sensor.py - homeassistant/components/russound_rio/media_player.py - homeassistant/components/russound_rnet/media_player.py - homeassistant/components/ruuvi_gateway/__init__.py - homeassistant/components/ruuvi_gateway/bluetooth.py - homeassistant/components/ruuvi_gateway/coordinator.py - 
homeassistant/components/rympro/__init__.py - homeassistant/components/rympro/coordinator.py - homeassistant/components/rympro/sensor.py - homeassistant/components/sabnzbd/__init__.py - homeassistant/components/sabnzbd/coordinator.py - homeassistant/components/sabnzbd/sensor.py - homeassistant/components/saj/sensor.py - homeassistant/components/satel_integra/* - homeassistant/components/schluter/* - homeassistant/components/screenlogic/binary_sensor.py - homeassistant/components/screenlogic/climate.py - homeassistant/components/screenlogic/coordinator.py - homeassistant/components/screenlogic/entity.py - homeassistant/components/screenlogic/light.py - homeassistant/components/screenlogic/number.py - homeassistant/components/screenlogic/sensor.py - homeassistant/components/screenlogic/switch.py - homeassistant/components/scsgate/* - homeassistant/components/sendgrid/notify.py - homeassistant/components/sense/__init__.py - homeassistant/components/sense/binary_sensor.py - homeassistant/components/sense/sensor.py - homeassistant/components/senz/__init__.py - homeassistant/components/senz/api.py - homeassistant/components/senz/climate.py - homeassistant/components/serial/sensor.py - homeassistant/components/serial_pm/sensor.py - homeassistant/components/sesame/lock.py - homeassistant/components/seven_segments/image_processing.py - homeassistant/components/shodan/sensor.py - homeassistant/components/sia/__init__.py - homeassistant/components/sia/alarm_control_panel.py - homeassistant/components/sia/binary_sensor.py - homeassistant/components/sia/hub.py - homeassistant/components/sia/sia_entity_base.py - homeassistant/components/sia/utils.py - homeassistant/components/simplepush/__init__.py - homeassistant/components/simplepush/notify.py - homeassistant/components/simplisafe/__init__.py - homeassistant/components/simplisafe/alarm_control_panel.py - homeassistant/components/simplisafe/binary_sensor.py - homeassistant/components/simplisafe/button.py - homeassistant/components/simplisafe/lock.py - homeassistant/components/simplisafe/sensor.py - homeassistant/components/sinch/* - homeassistant/components/sisyphus/* - homeassistant/components/sky_hub/* - homeassistant/components/skybeacon/sensor.py - homeassistant/components/skybell/__init__.py - homeassistant/components/skybell/camera.py - homeassistant/components/skybell/light.py - homeassistant/components/skybell/sensor.py - homeassistant/components/skybell/switch.py - homeassistant/components/slack/__init__.py - homeassistant/components/slack/notify.py - homeassistant/components/slack/sensor.py - homeassistant/components/slide/* - homeassistant/components/slimproto/__init__.py - homeassistant/components/slimproto/media_player.py - homeassistant/components/sma/__init__.py - homeassistant/components/sma/sensor.py - homeassistant/components/smappee/__init__.py - homeassistant/components/smappee/api.py - homeassistant/components/smappee/binary_sensor.py - homeassistant/components/smappee/sensor.py - homeassistant/components/smappee/switch.py - homeassistant/components/smarty/* - homeassistant/components/sms/__init__.py - homeassistant/components/sms/coordinator.py - homeassistant/components/sms/gateway.py - homeassistant/components/sms/notify.py - homeassistant/components/sms/sensor.py - homeassistant/components/smtp/notify.py - homeassistant/components/snapcast/__init__.py - homeassistant/components/snapcast/media_player.py - homeassistant/components/snapcast/server.py - homeassistant/components/snmp/device_tracker.py - 
homeassistant/components/snmp/sensor.py - homeassistant/components/snmp/switch.py - homeassistant/components/snooz/__init__.py - homeassistant/components/solaredge/__init__.py - homeassistant/components/solaredge/coordinator.py - homeassistant/components/solaredge_local/sensor.py - homeassistant/components/solax/__init__.py - homeassistant/components/solax/sensor.py - homeassistant/components/soma/__init__.py - homeassistant/components/soma/cover.py - homeassistant/components/soma/sensor.py - homeassistant/components/soma/utils.py - homeassistant/components/somfy_mylink/__init__.py - homeassistant/components/somfy_mylink/cover.py - homeassistant/components/sonos/__init__.py - homeassistant/components/sonos/alarms.py - homeassistant/components/sonos/entity.py - homeassistant/components/sonos/favorites.py - homeassistant/components/sonos/helpers.py - homeassistant/components/sonos/household_coordinator.py - homeassistant/components/sonos/media.py - homeassistant/components/sonos/media_browser.py - homeassistant/components/sonos/media_player.py - homeassistant/components/sonos/speaker.py - homeassistant/components/sonos/switch.py - homeassistant/components/sony_projector/switch.py - homeassistant/components/spc/__init__.py - homeassistant/components/spc/alarm_control_panel.py - homeassistant/components/spc/binary_sensor.py - homeassistant/components/spider/__init__.py - homeassistant/components/spider/climate.py - homeassistant/components/spider/sensor.py - homeassistant/components/spider/switch.py - homeassistant/components/splunk/* - homeassistant/components/spotify/__init__.py - homeassistant/components/spotify/browse_media.py - homeassistant/components/spotify/media_player.py - homeassistant/components/spotify/system_health.py - homeassistant/components/spotify/util.py - homeassistant/components/squeezebox/__init__.py - homeassistant/components/squeezebox/browse_media.py - homeassistant/components/squeezebox/media_player.py - homeassistant/components/starline/__init__.py - homeassistant/components/starline/account.py - homeassistant/components/starline/binary_sensor.py - homeassistant/components/starline/button.py - homeassistant/components/starline/device_tracker.py - homeassistant/components/starline/entity.py - homeassistant/components/starline/lock.py - homeassistant/components/starline/sensor.py - homeassistant/components/starline/switch.py - homeassistant/components/starlingbank/sensor.py - homeassistant/components/starlink/__init__.py - homeassistant/components/starlink/binary_sensor.py - homeassistant/components/starlink/button.py - homeassistant/components/starlink/coordinator.py - homeassistant/components/starlink/device_tracker.py - homeassistant/components/starlink/sensor.py - homeassistant/components/starlink/switch.py - homeassistant/components/starlink/time.py - homeassistant/components/steam_online/sensor.py - homeassistant/components/stiebel_eltron/* - homeassistant/components/stookalert/__init__.py - homeassistant/components/stookalert/binary_sensor.py - homeassistant/components/stookwijzer/__init__.py - homeassistant/components/stookwijzer/sensor.py - homeassistant/components/stream/__init__.py - homeassistant/components/stream/core.py - homeassistant/components/stream/fmp4utils.py - homeassistant/components/stream/hls.py - homeassistant/components/stream/worker.py - homeassistant/components/streamlabswater/__init__.py - homeassistant/components/streamlabswater/binary_sensor.py - homeassistant/components/streamlabswater/coordinator.py - 
homeassistant/components/streamlabswater/sensor.py - homeassistant/components/suez_water/__init__.py - homeassistant/components/suez_water/sensor.py - homeassistant/components/supervisord/sensor.py - homeassistant/components/supla/* - homeassistant/components/surepetcare/__init__.py - homeassistant/components/surepetcare/binary_sensor.py - homeassistant/components/surepetcare/coordinator.py - homeassistant/components/surepetcare/entity.py - homeassistant/components/surepetcare/sensor.py - homeassistant/components/swiss_hydrological_data/sensor.py - homeassistant/components/swiss_public_transport/__init__.py - homeassistant/components/swiss_public_transport/coordinator.py - homeassistant/components/swiss_public_transport/sensor.py - homeassistant/components/swisscom/device_tracker.py - homeassistant/components/switchbee/__init__.py - homeassistant/components/switchbee/button.py - homeassistant/components/switchbee/climate.py - homeassistant/components/switchbee/coordinator.py - homeassistant/components/switchbee/cover.py - homeassistant/components/switchbee/entity.py - homeassistant/components/switchbee/light.py - homeassistant/components/switchbee/switch.py - homeassistant/components/switchbot/__init__.py - homeassistant/components/switchbot/binary_sensor.py - homeassistant/components/switchbot/coordinator.py - homeassistant/components/switchbot/cover.py - homeassistant/components/switchbot/entity.py - homeassistant/components/switchbot/humidifier.py - homeassistant/components/switchbot/light.py - homeassistant/components/switchbot/lock.py - homeassistant/components/switchbot/sensor.py - homeassistant/components/switchbot/switch.py - homeassistant/components/switchbot_cloud/climate.py - homeassistant/components/switchbot_cloud/coordinator.py - homeassistant/components/switchbot_cloud/entity.py - homeassistant/components/switchbot_cloud/sensor.py - homeassistant/components/switchbot_cloud/switch.py - homeassistant/components/switchmate/switch.py - homeassistant/components/syncthing/__init__.py - homeassistant/components/syncthing/sensor.py - homeassistant/components/syncthru/__init__.py - homeassistant/components/syncthru/sensor.py - homeassistant/components/synology_chat/notify.py - homeassistant/components/synology_dsm/__init__.py - homeassistant/components/synology_dsm/binary_sensor.py - homeassistant/components/synology_dsm/button.py - homeassistant/components/synology_dsm/camera.py - homeassistant/components/synology_dsm/common.py - homeassistant/components/synology_dsm/coordinator.py - homeassistant/components/synology_dsm/entity.py - homeassistant/components/synology_dsm/sensor.py - homeassistant/components/synology_dsm/service.py - homeassistant/components/synology_dsm/switch.py - homeassistant/components/synology_dsm/update.py - homeassistant/components/synology_srm/device_tracker.py - homeassistant/components/syslog/notify.py - homeassistant/components/system_bridge/__init__.py - homeassistant/components/system_bridge/binary_sensor.py - homeassistant/components/system_bridge/coordinator.py - homeassistant/components/system_bridge/entity.py - homeassistant/components/system_bridge/media_player.py - homeassistant/components/system_bridge/notify.py - homeassistant/components/system_bridge/sensor.py - homeassistant/components/system_bridge/update.py - homeassistant/components/tado/__init__.py - homeassistant/components/tado/binary_sensor.py - homeassistant/components/tado/climate.py - homeassistant/components/tado/device_tracker.py - homeassistant/components/tado/sensor.py - 
homeassistant/components/tado/water_heater.py - homeassistant/components/tami4/button.py - homeassistant/components/tank_utility/sensor.py - homeassistant/components/tapsaff/binary_sensor.py - homeassistant/components/tautulli/__init__.py - homeassistant/components/tautulli/coordinator.py - homeassistant/components/tautulli/sensor.py - homeassistant/components/ted5000/sensor.py - homeassistant/components/telegram/notify.py - homeassistant/components/telegram_bot/__init__.py - homeassistant/components/telegram_bot/polling.py - homeassistant/components/telegram_bot/webhooks.py - homeassistant/components/tellduslive/__init__.py - homeassistant/components/tellduslive/binary_sensor.py - homeassistant/components/tellduslive/cover.py - homeassistant/components/tellduslive/entry.py - homeassistant/components/tellduslive/light.py - homeassistant/components/tellduslive/sensor.py - homeassistant/components/tellduslive/switch.py - homeassistant/components/tellstick/* - homeassistant/components/telnet/switch.py - homeassistant/components/temper/sensor.py - homeassistant/components/tensorflow/image_processing.py - homeassistant/components/tfiac/climate.py - homeassistant/components/thermoworks_smoke/sensor.py - homeassistant/components/thingspeak/* - homeassistant/components/thinkingcleaner/* - homeassistant/components/thomson/device_tracker.py - homeassistant/components/tibber/__init__.py - homeassistant/components/tibber/coordinator.py - homeassistant/components/tibber/sensor.py - homeassistant/components/tikteck/light.py - homeassistant/components/tile/__init__.py - homeassistant/components/tile/device_tracker.py - homeassistant/components/time_date/sensor.py - homeassistant/components/tmb/sensor.py - homeassistant/components/todoist/calendar.py - homeassistant/components/tolo/__init__.py - homeassistant/components/tolo/binary_sensor.py - homeassistant/components/tolo/button.py - homeassistant/components/tolo/climate.py - homeassistant/components/tolo/fan.py - homeassistant/components/tolo/light.py - homeassistant/components/tolo/number.py - homeassistant/components/tolo/select.py - homeassistant/components/tolo/sensor.py - homeassistant/components/tolo/switch.py - homeassistant/components/toon/__init__.py - homeassistant/components/toon/binary_sensor.py - homeassistant/components/toon/climate.py - homeassistant/components/toon/coordinator.py - homeassistant/components/toon/helpers.py - homeassistant/components/toon/models.py - homeassistant/components/toon/oauth2.py - homeassistant/components/toon/sensor.py - homeassistant/components/toon/switch.py - homeassistant/components/torque/sensor.py - homeassistant/components/totalconnect/__init__.py - homeassistant/components/touchline/climate.py - homeassistant/components/tplink_lte/* - homeassistant/components/tplink_omada/__init__.py - homeassistant/components/tplink_omada/binary_sensor.py - homeassistant/components/tplink_omada/controller.py - homeassistant/components/tplink_omada/update.py - homeassistant/components/traccar/device_tracker.py - homeassistant/components/traccar_server/__init__.py - homeassistant/components/traccar_server/coordinator.py - homeassistant/components/traccar_server/device_tracker.py - homeassistant/components/traccar_server/entity.py - homeassistant/components/traccar_server/helpers.py - homeassistant/components/traccar_server/sensor.py - homeassistant/components/tradfri/__init__.py - homeassistant/components/tradfri/base_class.py - homeassistant/components/tradfri/coordinator.py - homeassistant/components/tradfri/cover.py - 
homeassistant/components/tradfri/fan.py - homeassistant/components/tradfri/light.py - homeassistant/components/tradfri/sensor.py - homeassistant/components/tradfri/switch.py - homeassistant/components/trafikverket_weatherstation/__init__.py - homeassistant/components/trafikverket_weatherstation/coordinator.py - homeassistant/components/trafikverket_weatherstation/sensor.py - homeassistant/components/transmission/__init__.py - homeassistant/components/transmission/coordinator.py - homeassistant/components/transmission/sensor.py - homeassistant/components/transmission/switch.py - homeassistant/components/travisci/sensor.py - homeassistant/components/tuya/__init__.py - homeassistant/components/tuya/alarm_control_panel.py - homeassistant/components/tuya/base.py - homeassistant/components/tuya/binary_sensor.py - homeassistant/components/tuya/button.py - homeassistant/components/tuya/camera.py - homeassistant/components/tuya/climate.py - homeassistant/components/tuya/cover.py - homeassistant/components/tuya/fan.py - homeassistant/components/tuya/humidifier.py - homeassistant/components/tuya/light.py - homeassistant/components/tuya/number.py - homeassistant/components/tuya/select.py - homeassistant/components/tuya/sensor.py - homeassistant/components/tuya/siren.py - homeassistant/components/tuya/switch.py - homeassistant/components/tuya/util.py - homeassistant/components/tuya/vacuum.py - homeassistant/components/twilio_call/notify.py - homeassistant/components/twilio_sms/notify.py - homeassistant/components/twitter/notify.py - homeassistant/components/ubus/device_tracker.py - homeassistant/components/ue_smart_radio/media_player.py - homeassistant/components/ukraine_alarm/__init__.py - homeassistant/components/ukraine_alarm/binary_sensor.py - homeassistant/components/unifi_direct/__init__.py - homeassistant/components/unifi_direct/device_tracker.py - homeassistant/components/unifiled/* - homeassistant/components/upb/__init__.py - homeassistant/components/upb/light.py - homeassistant/components/upc_connect/* - homeassistant/components/upcloud/__init__.py - homeassistant/components/upcloud/binary_sensor.py - homeassistant/components/upcloud/switch.py - homeassistant/components/upnp/__init__.py - homeassistant/components/upnp/device.py - homeassistant/components/upnp/sensor.py - homeassistant/components/v2c/__init__.py - homeassistant/components/v2c/binary_sensor.py - homeassistant/components/v2c/coordinator.py - homeassistant/components/v2c/entity.py - homeassistant/components/v2c/number.py - homeassistant/components/v2c/switch.py - homeassistant/components/vallox/__init__.py - homeassistant/components/vallox/coordinator.py - homeassistant/components/vasttrafik/sensor.py - homeassistant/components/velbus/__init__.py - homeassistant/components/velbus/binary_sensor.py - homeassistant/components/velbus/button.py - homeassistant/components/velbus/climate.py - homeassistant/components/velbus/cover.py - homeassistant/components/velbus/entity.py - homeassistant/components/velbus/light.py - homeassistant/components/velbus/select.py - homeassistant/components/velbus/sensor.py - homeassistant/components/velbus/switch.py - homeassistant/components/velux/__init__.py - homeassistant/components/velux/cover.py - homeassistant/components/velux/light.py - homeassistant/components/venstar/climate.py - homeassistant/components/venstar/coordinator.py - homeassistant/components/venstar/sensor.py - homeassistant/components/verisure/__init__.py - homeassistant/components/verisure/alarm_control_panel.py - 
homeassistant/components/verisure/binary_sensor.py - homeassistant/components/verisure/camera.py - homeassistant/components/verisure/coordinator.py - homeassistant/components/verisure/lock.py - homeassistant/components/verisure/sensor.py - homeassistant/components/verisure/switch.py - homeassistant/components/versasense/* - homeassistant/components/vesync/__init__.py - homeassistant/components/vesync/fan.py - homeassistant/components/vesync/light.py - homeassistant/components/vesync/sensor.py - homeassistant/components/vesync/switch.py - homeassistant/components/viaggiatreno/sensor.py - homeassistant/components/vicare/__init__.py - homeassistant/components/vicare/button.py - homeassistant/components/vicare/climate.py - homeassistant/components/vicare/entity.py - homeassistant/components/vicare/number.py - homeassistant/components/vicare/sensor.py - homeassistant/components/vicare/types.py - homeassistant/components/vicare/utils.py - homeassistant/components/vicare/water_heater.py - homeassistant/components/vilfo/__init__.py - homeassistant/components/vilfo/sensor.py - homeassistant/components/vivotek/camera.py - homeassistant/components/vlc/media_player.py - homeassistant/components/vlc_telnet/__init__.py - homeassistant/components/vlc_telnet/media_player.py - homeassistant/components/vodafone_station/__init__.py - homeassistant/components/vodafone_station/button.py - homeassistant/components/vodafone_station/coordinator.py - homeassistant/components/vodafone_station/device_tracker.py - homeassistant/components/vodafone_station/sensor.py - homeassistant/components/volkszaehler/sensor.py - homeassistant/components/volumio/__init__.py - homeassistant/components/volumio/browse_media.py - homeassistant/components/volumio/media_player.py - homeassistant/components/volvooncall/__init__.py - homeassistant/components/volvooncall/binary_sensor.py - homeassistant/components/volvooncall/device_tracker.py - homeassistant/components/volvooncall/lock.py - homeassistant/components/volvooncall/sensor.py - homeassistant/components/volvooncall/switch.py - homeassistant/components/vulcan/__init__.py - homeassistant/components/vulcan/calendar.py - homeassistant/components/vulcan/fetch_data.py - homeassistant/components/w800rf32/* - homeassistant/components/waqi/sensor.py - homeassistant/components/waterfurnace/* - homeassistant/components/watson_iot/* - homeassistant/components/watson_tts/tts.py - homeassistant/components/watttime/__init__.py - homeassistant/components/watttime/sensor.py - homeassistant/components/weatherflow/__init__.py - homeassistant/components/weatherflow/sensor.py - homeassistant/components/weatherflow_cloud/__init__.py - homeassistant/components/weatherflow_cloud/coordinator.py - homeassistant/components/weatherflow_cloud/weather.py - homeassistant/components/wiffi/__init__.py - homeassistant/components/wiffi/binary_sensor.py - homeassistant/components/wiffi/sensor.py - homeassistant/components/wiffi/wiffi_strings.py - homeassistant/components/wirelesstag/* - homeassistant/components/wolflink/__init__.py - homeassistant/components/wolflink/sensor.py - homeassistant/components/worldtidesinfo/sensor.py - homeassistant/components/worxlandroid/sensor.py - homeassistant/components/x10/light.py - homeassistant/components/xbox/__init__.py - homeassistant/components/xbox/api.py - homeassistant/components/xbox/base_sensor.py - homeassistant/components/xbox/binary_sensor.py - homeassistant/components/xbox/browse_media.py - homeassistant/components/xbox/coordinator.py - 
homeassistant/components/xbox/media_player.py - homeassistant/components/xbox/remote.py - homeassistant/components/xbox/sensor.py - homeassistant/components/xeoma/camera.py - homeassistant/components/xiaomi/camera.py - homeassistant/components/xiaomi_aqara/__init__.py - homeassistant/components/xiaomi_aqara/binary_sensor.py - homeassistant/components/xiaomi_aqara/cover.py - homeassistant/components/xiaomi_aqara/light.py - homeassistant/components/xiaomi_aqara/lock.py - homeassistant/components/xiaomi_aqara/sensor.py - homeassistant/components/xiaomi_aqara/switch.py - homeassistant/components/xiaomi_miio/__init__.py - homeassistant/components/xiaomi_miio/air_quality.py - homeassistant/components/xiaomi_miio/alarm_control_panel.py - homeassistant/components/xiaomi_miio/binary_sensor.py - homeassistant/components/xiaomi_miio/button.py - homeassistant/components/xiaomi_miio/device.py - homeassistant/components/xiaomi_miio/device_tracker.py - homeassistant/components/xiaomi_miio/fan.py - homeassistant/components/xiaomi_miio/gateway.py - homeassistant/components/xiaomi_miio/humidifier.py - homeassistant/components/xiaomi_miio/light.py - homeassistant/components/xiaomi_miio/number.py - homeassistant/components/xiaomi_miio/remote.py - homeassistant/components/xiaomi_miio/sensor.py - homeassistant/components/xiaomi_miio/switch.py - homeassistant/components/xiaomi_miio/typing.py - homeassistant/components/xiaomi_tv/media_player.py - homeassistant/components/xmpp/notify.py - homeassistant/components/xs1/* - homeassistant/components/yale_smart_alarm/__init__.py - homeassistant/components/yale_smart_alarm/alarm_control_panel.py - homeassistant/components/yale_smart_alarm/entity.py - homeassistant/components/yalexs_ble/__init__.py - homeassistant/components/yalexs_ble/binary_sensor.py - homeassistant/components/yalexs_ble/entity.py - homeassistant/components/yalexs_ble/lock.py - homeassistant/components/yalexs_ble/sensor.py - homeassistant/components/yalexs_ble/util.py - homeassistant/components/yamaha_musiccast/__init__.py - homeassistant/components/yamaha_musiccast/media_player.py - homeassistant/components/yamaha_musiccast/number.py - homeassistant/components/yamaha_musiccast/select.py - homeassistant/components/yamaha_musiccast/switch.py - homeassistant/components/yandex_transport/sensor.py - homeassistant/components/yardian/__init__.py - homeassistant/components/yardian/coordinator.py - homeassistant/components/yardian/switch.py - homeassistant/components/yeelightsunflower/light.py - homeassistant/components/yi/camera.py - homeassistant/components/yolink/__init__.py - homeassistant/components/yolink/api.py - homeassistant/components/yolink/binary_sensor.py - homeassistant/components/yolink/climate.py - homeassistant/components/yolink/coordinator.py - homeassistant/components/yolink/cover.py - homeassistant/components/yolink/entity.py - homeassistant/components/yolink/light.py - homeassistant/components/yolink/lock.py - homeassistant/components/yolink/number.py - homeassistant/components/yolink/sensor.py - homeassistant/components/yolink/services.py - homeassistant/components/yolink/siren.py - homeassistant/components/yolink/switch.py - homeassistant/components/yolink/valve.py - homeassistant/components/zabbix/* - homeassistant/components/zamg/coordinator.py - homeassistant/components/zengge/light.py - homeassistant/components/zeroconf/models.py - homeassistant/components/zeroconf/usage.py - homeassistant/components/zestimate/sensor.py - homeassistant/components/zha/core/cluster_handlers/* - 
homeassistant/components/zha/core/device.py - homeassistant/components/zha/core/gateway.py - homeassistant/components/zha/core/helpers.py - homeassistant/components/zha/light.py - homeassistant/components/zha/websocket_api.py - homeassistant/components/zhong_hong/climate.py - homeassistant/components/ziggo_mediabox_xl/media_player.py - homeassistant/components/zoneminder/* - homeassistant/components/zwave_me/__init__.py - homeassistant/components/zwave_me/binary_sensor.py - homeassistant/components/zwave_me/button.py - homeassistant/components/zwave_me/climate.py - homeassistant/components/zwave_me/cover.py - homeassistant/components/zwave_me/fan.py - homeassistant/components/zwave_me/helpers.py - homeassistant/components/zwave_me/light.py - homeassistant/components/zwave_me/lock.py - homeassistant/components/zwave_me/number.py - homeassistant/components/zwave_me/sensor.py - homeassistant/components/zwave_me/siren.py - homeassistant/components/zwave_me/switch.py - - -[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # TYPE_CHECKING and @overload blocks are never executed during pytest run - if TYPE_CHECKING: - @overload diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index d69b1ac0c7d..23365feffb7 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -74,7 +74,6 @@ If the code communicates with devices, web services, or third-party tools: - [ ] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description. -- [ ] Untested files have been added to `.coveragerc`. 
diff --git a/homeassistant/components/upnp/config_flow.py b/homeassistant/components/upnp/config_flow.py
ssdp_confirm(None) --> ssdp_confirm({}) --> create_entry() # 2: user(None): scan --> user({...}) --> create_entry() + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + """Get the options flow for this handler.""" + return UpnpOptionsFlowHandler(config_entry) + @property def _discoveries(self) -> dict[str, SsdpServiceInfo]: """Get current discoveries.""" @@ -249,9 +264,14 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], CONFIG_ENTRY_LOCATION: get_preferred_location(discovery.ssdp_all_locations), } + options = { + CONFIG_ENTRY_FORCE_POLL: False, + } await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False) - return self.async_create_entry(title=user_input["title"], data=data) + return self.async_create_entry( + title=user_input["title"], data=data, options=options + ) async def _async_create_entry_from_discovery( self, @@ -273,4 +293,30 @@ class UpnpFlowHandler(ConfigFlow, domain=DOMAIN): CONFIG_ENTRY_MAC_ADDRESS: mac_address, CONFIG_ENTRY_HOST: discovery.ssdp_headers["_host"], } - return self.async_create_entry(title=title, data=data) + options = { + CONFIG_ENTRY_FORCE_POLL: False, + } + return self.async_create_entry(title=title, data=data, options=options) + + +class UpnpOptionsFlowHandler(OptionsFlowWithConfigEntry): + """Handle an options flow.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle options flow.""" + if user_input is not None: + return self.async_create_entry(title="", data=user_input) + + data_schema = vol.Schema( + { + vol.Optional( + CONFIG_ENTRY_FORCE_POLL, + default=self.options.get( + CONFIG_ENTRY_FORCE_POLL, DEFAULT_CONFIG_ENTRY_FORCE_POLL + ), + ): bool, + } + ) + return self.async_show_form(step_id="init", data_schema=data_schema) diff --git a/homeassistant/components/upnp/const.py b/homeassistant/components/upnp/const.py index e7b44329546..d85675d8a4d 100644 --- a/homeassistant/components/upnp/const.py +++ b/homeassistant/components/upnp/const.py @@ -21,8 +21,10 @@ TIMESTAMP = "timestamp" DATA_PACKETS = "packets" DATA_RATE_PACKETS_PER_SECOND = f"{DATA_PACKETS}/{UnitOfTime.SECONDS}" WAN_STATUS = "wan_status" +PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4 = "port_mapping_number_of_entries" ROUTER_IP = "ip" ROUTER_UPTIME = "uptime" +CONFIG_ENTRY_FORCE_POLL = "force_poll" CONFIG_ENTRY_ST = "st" CONFIG_ENTRY_UDN = "udn" CONFIG_ENTRY_ORIGINAL_UDN = "original_udn" @@ -32,5 +34,6 @@ CONFIG_ENTRY_HOST = "host" IDENTIFIER_HOST = "upnp_host" IDENTIFIER_SERIAL_NUMBER = "upnp_serial_number" DEFAULT_SCAN_INTERVAL = timedelta(seconds=30).total_seconds() +DEFAULT_CONFIG_ENTRY_FORCE_POLL = False ST_IGD_V1 = "urn:schemas-upnp-org:device:InternetGatewayDevice:1" ST_IGD_V2 = "urn:schemas-upnp-org:device:InternetGatewayDevice:2" diff --git a/homeassistant/components/upnp/coordinator.py b/homeassistant/components/upnp/coordinator.py index 72e14ecc4ff..37ff700bfe2 100644 --- a/homeassistant/components/upnp/coordinator.py +++ b/homeassistant/components/upnp/coordinator.py @@ -1,5 +1,7 @@ """UPnP/IGD coordinator.""" +from collections import defaultdict +from collections.abc import Callable from datetime import datetime, timedelta from async_upnp_client.exceptions import UpnpCommunicationError @@ -27,6 +29,7 @@ class UpnpDataUpdateCoordinator( """Initialize.""" self.device = device self.device_entry = device_entry + self._features_by_entity_id: defaultdict[str, set[str]] = defaultdict(set) 
super().__init__( hass, @@ -35,12 +38,34 @@ class UpnpDataUpdateCoordinator( update_interval=update_interval, ) + def register_entity(self, key: str, entity_id: str) -> Callable[[], None]: + """Register an entity.""" + self._features_by_entity_id[key].add(entity_id) + + def unregister_entity() -> None: + """Unregister entity.""" + self._features_by_entity_id[key].remove(entity_id) + + if not self._features_by_entity_id[key]: + del self._features_by_entity_id[key] + + return unregister_entity + + @property + def _entity_description_keys(self) -> list[str] | None: + """Return a list of entity description keys for which data is required.""" + if not self._features_by_entity_id: + # Must be the first update, no entities attached/enabled yet. + return None + + return list(self._features_by_entity_id) + async def _async_update_data( self, ) -> dict[str, str | datetime | int | float | None]: """Update data.""" try: - return await self.device.async_get_data() + return await self.device.async_get_data(self._entity_description_keys) except UpnpCommunicationError as exception: LOGGER.debug( "Caught exception when updating device: %s, exception: %s", diff --git a/homeassistant/components/upnp/device.py b/homeassistant/components/upnp/device.py index bb0bcfc6a6e..923d4828879 100644 --- a/homeassistant/components/upnp/device.py +++ b/homeassistant/components/upnp/device.py @@ -8,9 +8,12 @@ from ipaddress import ip_address from typing import Any from urllib.parse import urlparse -from async_upnp_client.aiohttp import AiohttpSessionRequester +from async_upnp_client.aiohttp import AiohttpNotifyServer, AiohttpSessionRequester from async_upnp_client.client_factory import UpnpFactory -from async_upnp_client.profiles.igd import IgdDevice +from async_upnp_client.const import AddressTupleVXType +from async_upnp_client.exceptions import UpnpConnectionError +from async_upnp_client.profiles.igd import IgdDevice, IgdStateItem +from async_upnp_client.utils import async_get_local_ip from getmac import get_mac_address from homeassistant.core import HomeAssistant @@ -27,12 +30,28 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, TIMESTAMP, WAN_STATUS, ) +TYPE_STATE_ITEM_MAPPING = { + BYTES_RECEIVED: IgdStateItem.BYTES_RECEIVED, + BYTES_SENT: IgdStateItem.BYTES_SENT, + KIBIBYTES_PER_SEC_RECEIVED: IgdStateItem.KIBIBYTES_PER_SEC_RECEIVED, + KIBIBYTES_PER_SEC_SENT: IgdStateItem.KIBIBYTES_PER_SEC_SENT, + PACKETS_PER_SEC_RECEIVED: IgdStateItem.PACKETS_PER_SEC_RECEIVED, + PACKETS_PER_SEC_SENT: IgdStateItem.PACKETS_PER_SEC_SENT, + PACKETS_RECEIVED: IgdStateItem.PACKETS_RECEIVED, + PACKETS_SENT: IgdStateItem.PACKETS_SENT, + ROUTER_IP: IgdStateItem.EXTERNAL_IP_ADDRESS, + ROUTER_UPTIME: IgdStateItem.UPTIME, + WAN_STATUS: IgdStateItem.CONNECTION_STATUS, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: IgdStateItem.PORT_MAPPING_NUMBER_OF_ENTRIES, +} + def get_preferred_location(locations: set[str]) -> str: """Get the preferred location (an IPv4 location) from a set of locations.""" @@ -64,26 +83,43 @@ async def async_get_mac_address_from_host(hass: HomeAssistant, host: str) -> str return mac_address -async def async_create_device(hass: HomeAssistant, location: str) -> Device: +async def async_create_device( + hass: HomeAssistant, location: str, force_poll: bool +) -> Device: """Create UPnP/IGD device.""" session = async_get_clientsession(hass, verify_ssl=False) requester = AiohttpSessionRequester(session, with_sleep=True, timeout=20) + # Create UPnP 
device. factory = UpnpFactory(requester, non_strict=True) upnp_device = await factory.async_create_device(location) + # Create notify server. + _, local_ip = await async_get_local_ip(location) + source: AddressTupleVXType = (local_ip, 0) + notify_server = AiohttpNotifyServer( + requester=requester, + source=source, + ) + await notify_server.async_start_server() + _LOGGER.debug("Started event handler at %s", notify_server.callback_url) + # Create profile wrapper. - igd_device = IgdDevice(upnp_device, None) - return Device(hass, igd_device) + igd_device = IgdDevice(upnp_device, notify_server.event_handler) + return Device(hass, igd_device, force_poll) class Device: """Home Assistant representation of a UPnP/IGD device.""" - def __init__(self, hass: HomeAssistant, igd_device: IgdDevice) -> None: + def __init__( + self, hass: HomeAssistant, igd_device: IgdDevice, force_poll: bool + ) -> None: """Initialize UPnP/IGD device.""" self.hass = hass self._igd_device = igd_device + self._force_poll = force_poll + self.coordinator: ( DataUpdateCoordinator[dict[str, str | datetime | int | float | None]] | None ) = None @@ -151,11 +187,54 @@ class Device: """Get string representation.""" return f"IGD Device: {self.name}/{self.udn}::{self.device_type}" - async def async_get_data(self) -> dict[str, str | datetime | int | float | None]: + @property + def force_poll(self) -> bool: + """Get force_poll.""" + return self._force_poll + + async def async_set_force_poll(self, force_poll: bool) -> None: + """Set force_poll, and (un)subscribe if needed.""" + self._force_poll = force_poll + + if self._force_poll: + # No need for subscriptions, as eventing will never be used. + await self.async_unsubscribe_services() + elif not self._force_poll and not self._igd_device.is_subscribed: + await self.async_subscribe_services() + + async def async_subscribe_services(self) -> None: + """Subscribe to services.""" + try: + await self._igd_device.async_subscribe_services(auto_resubscribe=True) + except UpnpConnectionError as ex: + _LOGGER.debug( + "Error subscribing to services, falling back to forced polling: %s", ex + ) + await self.async_set_force_poll(True) + + async def async_unsubscribe_services(self) -> None: + """Unsubscribe from services.""" + await self._igd_device.async_unsubscribe_services() + + async def async_get_data( + self, entity_description_keys: list[str] | None + ) -> dict[str, str | datetime | int | float | None]: """Get all data from device.""" - _LOGGER.debug("Getting data for device: %s", self) + if not entity_description_keys: + igd_state_items = None + else: + igd_state_items = { + TYPE_STATE_ITEM_MAPPING[key] for key in entity_description_keys + } + + _LOGGER.debug( + "Getting data for device: %s, state_items: %s, force_poll: %s", + self, + igd_state_items, + self._force_poll, + ) igd_state = await self._igd_device.async_get_traffic_and_status_data( - force_poll=True + igd_state_items, force_poll=self._force_poll ) def get_value(value: Any) -> Any: @@ -177,4 +256,7 @@ class Device: KIBIBYTES_PER_SEC_SENT: igd_state.kibibytes_per_sec_sent, PACKETS_PER_SEC_RECEIVED: igd_state.packets_per_sec_received, PACKETS_PER_SEC_SENT: igd_state.packets_per_sec_sent, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4: get_value( + igd_state.port_mapping_number_of_entries + ), } diff --git a/homeassistant/components/upnp/icons.json b/homeassistant/components/upnp/icons.json index 1d4ebaf183d..b6451f0fca8 100644 --- a/homeassistant/components/upnp/icons.json +++ b/homeassistant/components/upnp/icons.json @@ -33,6 +33,9 @@ }, 
"packet_upload_speed": { "default": "mdi:server-network" + }, + "port_mapping_number_of_entries_ipv4": { + "default": "mdi:server-network" } } } diff --git a/homeassistant/components/upnp/manifest.json b/homeassistant/components/upnp/manifest.json index b2972fc7790..30054af0512 100644 --- a/homeassistant/components/upnp/manifest.json +++ b/homeassistant/components/upnp/manifest.json @@ -8,7 +8,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["async_upnp_client"], - "requirements": ["async-upnp-client==0.39.0", "getmac==0.9.4"], + "requirements": ["async-upnp-client==0.40.0", "getmac==0.9.4"], "ssdp": [ { "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:1" diff --git a/homeassistant/components/upnp/sensor.py b/homeassistant/components/upnp/sensor.py index df7128830b3..d6da50c877d 100644 --- a/homeassistant/components/upnp/sensor.py +++ b/homeassistant/components/upnp/sensor.py @@ -33,6 +33,7 @@ from .const import ( PACKETS_PER_SEC_SENT, PACKETS_RECEIVED, PACKETS_SENT, + PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, ROUTER_IP, ROUTER_UPTIME, WAN_STATUS, @@ -99,6 +100,12 @@ SENSOR_DESCRIPTIONS: tuple[UpnpSensorEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, ), + UpnpSensorEntityDescription( + key=PORT_MAPPING_NUMBER_OF_ENTRIES_IPV4, + translation_key="port_mapping_number_of_entries_ipv4", + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), UpnpSensorEntityDescription( key=BYTES_RECEIVED, translation_key="download_speed", @@ -159,8 +166,8 @@ async def async_setup_entry( if coordinator.data.get(entity_description.key) is not None ] - LOGGER.debug("Adding sensor entities: %s", entities) async_add_entities(entities) + LOGGER.debug("Added sensor entities: %s", entities) class UpnpSensor(UpnpEntity, SensorEntity): @@ -174,3 +181,13 @@ class UpnpSensor(UpnpEntity, SensorEntity): if (key := self.entity_description.value_key) is None: return None return self.coordinator.data[key] + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + await super().async_added_to_hass() + + # Register self at coordinator. 
+ key = self.entity_description.key + entity_id = self.entity_id + unregister = self.coordinator.register_entity(key, entity_id) + self.async_on_remove(unregister) diff --git a/homeassistant/components/upnp/strings.json b/homeassistant/components/upnp/strings.json index 7ce1798c351..bb414fa95f8 100644 --- a/homeassistant/components/upnp/strings.json +++ b/homeassistant/components/upnp/strings.json @@ -21,7 +21,8 @@ "step": { "init": { "data": { - "scan_interval": "Update interval (seconds, minimal 30)" + "scan_interval": "Update interval (seconds, minimal 30)", + "force_poll": "Force polling of all data" } } } @@ -65,6 +66,9 @@ }, "wan_status": { "name": "WAN status" + }, + "port_mapping_number_of_entries_ipv4": { + "name": "Number of port mapping entries (IPv4)" } } } diff --git a/homeassistant/components/usgs_earthquakes_feed/geo_location.py b/homeassistant/components/usgs_earthquakes_feed/geo_location.py index 33455dc11a9..aa9817eab7d 100644 --- a/homeassistant/components/usgs_earthquakes_feed/geo_location.py +++ b/homeassistant/components/usgs_earthquakes_feed/geo_location.py @@ -276,17 +276,17 @@ class UsgsEarthquakesEvent(GeolocationEvent): @property def extra_state_attributes(self) -> dict[str, Any]: """Return the device state attributes.""" - attributes = {} - for key, value in ( - (ATTR_EXTERNAL_ID, self._external_id), - (ATTR_PLACE, self._place), - (ATTR_MAGNITUDE, self._magnitude), - (ATTR_TIME, self._time), - (ATTR_UPDATED, self._updated), - (ATTR_STATUS, self._status), - (ATTR_TYPE, self._type), - (ATTR_ALERT, self._alert), - ): - if value or isinstance(value, bool): - attributes[key] = value - return attributes + return { + key: value + for key, value in ( + (ATTR_EXTERNAL_ID, self._external_id), + (ATTR_PLACE, self._place), + (ATTR_MAGNITUDE, self._magnitude), + (ATTR_TIME, self._time), + (ATTR_UPDATED, self._updated), + (ATTR_STATUS, self._status), + (ATTR_TYPE, self._type), + (ATTR_ALERT, self._alert), + ) + if value or isinstance(value, bool) + } diff --git a/homeassistant/components/v2c/icons.json b/homeassistant/components/v2c/icons.json index 1b76b669956..6b0a41bf752 100644 --- a/homeassistant/components/v2c/icons.json +++ b/homeassistant/components/v2c/icons.json @@ -21,6 +21,15 @@ }, "battery_power": { "default": "mdi:home-battery" + }, + "ssid": { + "default": "mdi:wifi" + }, + "ip_address": { + "default": "mdi:ip" + }, + "signal_status": { + "default": "mdi:signal" } }, "switch": { diff --git a/homeassistant/components/v2c/manifest.json b/homeassistant/components/v2c/manifest.json index ffe4b52ee6e..3a6eab0f335 100644 --- a/homeassistant/components/v2c/manifest.json +++ b/homeassistant/components/v2c/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/v2c", "iot_class": "local_polling", - "requirements": ["pytrydan==0.7.0"] + "requirements": ["pytrydan==0.8.0"] } diff --git a/homeassistant/components/v2c/number.py b/homeassistant/components/v2c/number.py index 2ff70226132..1540b098cf1 100644 --- a/homeassistant/components/v2c/number.py +++ b/homeassistant/components/v2c/number.py @@ -13,6 +13,7 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) +from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,11 +38,34 @@ TRYDAN_NUMBER_SETTINGS = ( key="intensity", translation_key="intensity", device_class=NumberDeviceClass.CURRENT, + 
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, native_min_value=MIN_INTENSITY, native_max_value=MAX_INTENSITY, value_fn=lambda evse_data: evse_data.intensity, update_fn=lambda evse, value: evse.intensity(value), ), + V2CSettingsNumberEntityDescription( + key="min_intensity", + translation_key="min_intensity", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.min_intensity, + update_fn=lambda evse, value: evse.min_intensity(value), + ), + V2CSettingsNumberEntityDescription( + key="max_intensity", + translation_key="max_intensity", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.max_intensity, + update_fn=lambda evse, value: evse.max_intensity(value), + ), ) diff --git a/homeassistant/components/v2c/sensor.py b/homeassistant/components/v2c/sensor.py index fc0cc0bfaa8..97853740e9d 100644 --- a/homeassistant/components/v2c/sensor.py +++ b/homeassistant/components/v2c/sensor.py @@ -15,7 +15,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTime +from homeassistant.const import ( + EntityCategory, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -45,12 +51,20 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="charge_power", translation_key="charge_power", - icon="mdi:ev-station", native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, value_fn=lambda evse_data: evse_data.charge_power, ), + V2CSensorEntityDescription( + key="voltage_installation", + translation_key="voltage_installation", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + value_fn=lambda evse_data: evse_data.voltage_installation, + entity_registry_enabled_default=False, + ), V2CSensorEntityDescription( key="charge_energy", translation_key="charge_energy", @@ -86,6 +100,7 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="meter_error", translation_key="meter_error", + entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda evse_data: get_meter_value(evse_data.slave_error), entity_registry_enabled_default=False, device_class=SensorDeviceClass.ENUM, @@ -100,6 +115,28 @@ TRYDAN_SENSORS = ( value_fn=lambda evse_data: evse_data.battery_power, entity_registry_enabled_default=False, ), + V2CSensorEntityDescription( + key="ssid", + translation_key="ssid", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.SSID, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="ip_address", + translation_key="ip_address", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.IP, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="signal_status", + translation_key="signal_status", + entity_category=EntityCategory.DIAGNOSTIC, + 
state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda evse_data: evse_data.signal_status, + entity_registry_enabled_default=False, + ), ) diff --git a/homeassistant/components/v2c/strings.json b/homeassistant/components/v2c/strings.json index 3342652cfb4..d52b8f066f9 100644 --- a/homeassistant/components/v2c/strings.json +++ b/homeassistant/components/v2c/strings.json @@ -33,12 +33,21 @@ "number": { "intensity": { "name": "Intensity" + }, + "max_intensity": { + "name": "Max intensity" + }, + "min_intensity": { + "name": "Min intensity" } }, "sensor": { "charge_power": { "name": "Charge power" }, + "voltage_installation": { + "name": "Installation voltage" + }, "charge_energy": { "name": "Charge energy" }, @@ -93,6 +102,15 @@ "empty_message": "Empty message", "undefined_error": "Undefined error" } + }, + "ssid": { + "name": "SSID" + }, + "ip_address": { + "name": "IP address" + }, + "signal_status": { + "name": "Signal status" } }, "switch": { diff --git a/homeassistant/components/v2c/switch.py b/homeassistant/components/v2c/switch.py index cd89e954275..cca7da70e48 100644 --- a/homeassistant/components/v2c/switch.py +++ b/homeassistant/components/v2c/switch.py @@ -111,12 +111,12 @@ class V2CSwitchEntity(V2CBaseEntity, SwitchEntity): """Return the state of the EVSE switch.""" return self.entity_description.value_fn(self.data) - async def async_turn_on(self): + async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the EVSE switch.""" await self.entity_description.turn_on_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() - async def async_turn_off(self): + async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the EVSE switch.""" await self.entity_description.turn_off_fn(self.coordinator.evse) await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index a5bdf0983ae..4fe2cfd45d4 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -77,7 +77,13 @@ class ValloxFanEntity(ValloxEntity, FanEntity): """Representation of the fan.""" _attr_name = None - _attr_supported_features = FanEntityFeature.PRESET_MODE | FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False def __init__( self, diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index e97a68c2e82..3814275b703 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -223,7 +223,8 @@ class ValveEntity(Entity): async def async_handle_open_valve(self) -> None: """Open the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(100) + await self.async_set_valve_position(100) + return await self.async_open_valve() def close_valve(self) -> None: @@ -238,7 +239,8 @@ class ValveEntity(Entity): async def async_handle_close_valve(self) -> None: """Close the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(0) + await self.async_set_valve_position(0) + return await self.async_close_valve() async def async_toggle(self) -> None: diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 349196658d4..2c887ebf273 100644 --- 
a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -1,13 +1,19 @@ { "entity_component": { "_": { - "default": "mdi:pipe-valve" + "default": "mdi:valve-open", + "state": { + "closed": "mdi:valve-closed" + } }, "gas": { "default": "mdi:meter-gas" }, "water": { - "default": "mdi:pipe-valve" + "default": "mdi:valve-open", + "state": { + "closed": "mdi:valve-closed" + } } }, "services": { diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index 479b7f02024..d47444e3994 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True def check_entry_id(interface: str) -> str: - for entry in hass.config_entries.async_entries(DOMAIN): - if "port" in entry.data and entry.data["port"] == interface: - return entry.entry_id + for config_entry in hass.config_entries.async_entries(DOMAIN): + if "port" in config_entry.data and config_entry.data["port"] == interface: + return config_entry.entry_id raise vol.Invalid( "The interface provided is not defined as a port in a Velbus integration" ) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 4e9478ae575..c1cf2951bbd 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.7.5"], + "requirements": ["velbus-aio==2024.7.6"], "usb": [ { "vid": "10CF", diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 4b89fc66a84..1b7cbd1ff93 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -108,10 +108,14 @@ class VeluxEntity(Entity): _attr_should_poll = False - def __init__(self, node: Node) -> None: + def __init__(self, node: Node, config_entry_id: str) -> None: """Initialize the Velux device.""" self.node = node - self._attr_unique_id = node.serial_number + self._attr_unique_id = ( + node.serial_number + if node.serial_number + else f"{config_entry_id}_{node.node_id}" + ) self._attr_name = node.name if node.name else f"#{node.node_id}" @callback diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index c8688e4d186..cd7564eee81 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -29,7 +29,7 @@ async def async_setup_entry( """Set up cover(s) for Velux platform.""" module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxCover(node) + VeluxCover(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, OpeningDevice) ) @@ -41,9 +41,9 @@ class VeluxCover(VeluxEntity, CoverEntity): _is_blind = False node: OpeningDevice - def __init__(self, node: OpeningDevice) -> None: + def __init__(self, node: OpeningDevice, config_entry_id: str) -> None: """Initialize VeluxCover.""" - super().__init__(node) + super().__init__(node, config_entry_id) self._attr_device_class = CoverDeviceClass.WINDOW if isinstance(node, Awning): self._attr_device_class = CoverDeviceClass.AWNING diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index bbe9822648e..e98632701f3 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -23,7 +23,7 
@@ async def async_setup_entry( module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxLight(node) + VeluxLight(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, LighteningDevice) ) diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index fcb1e5f013e..181849f46a1 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -22,6 +22,7 @@ from homeassistant.config_entries import ( from homeassistant.const import CONF_EXCLUDE, CONF_LIGHTS, CONF_SOURCE from homeassistant.core import callback from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.typing import VolDictType from .const import CONF_CONTROLLER, CONF_LEGACY_UNIQUE_ID, DOMAIN @@ -49,9 +50,7 @@ def new_options(lights: list[int], exclude: list[int]) -> dict[str, list[int]]: return {CONF_LIGHTS: lights, CONF_EXCLUDE: exclude} -def options_schema( - options: Mapping[str, Any] | None = None, -) -> dict[vol.Optional, type[str]]: +def options_schema(options: Mapping[str, Any] | None = None) -> VolDictType: """Return options schema.""" options = options or {} return { diff --git a/homeassistant/components/vera/cover.py b/homeassistant/components/vera/cover.py index 542680925f2..25ffe987d5e 100644 --- a/homeassistant/components/vera/cover.py +++ b/homeassistant/components/vera/cover.py @@ -61,10 +61,11 @@ class VeraCover(VeraDevice[veraApi.VeraCurtain], CoverEntity): self.schedule_update_ha_state() @property - def is_closed(self) -> bool: + def is_closed(self) -> bool | None: """Return if the cover is closed.""" if self.current_cover_position is not None: return self.current_cover_position == 0 + return None def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" diff --git a/homeassistant/components/versasense/sensor.py b/homeassistant/components/versasense/sensor.py index 59d092ccdc1..4c861bf5787 100644 --- a/homeassistant/components/versasense/sensor.py +++ b/homeassistant/components/versasense/sensor.py @@ -30,7 +30,7 @@ async def async_setup_platform( ) -> None: """Set up the sensor platform.""" if discovery_info is None: - return None + return consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/versasense/switch.py b/homeassistant/components/versasense/switch.py index 195045882ff..10bca79e536 100644 --- a/homeassistant/components/versasense/switch.py +++ b/homeassistant/components/versasense/switch.py @@ -33,7 +33,7 @@ async def async_setup_platform( ) -> None: """Set up actuator platform.""" if discovery_info is None: - return None + return consumer = hass.data[DOMAIN][KEY_CONSUMER] diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 6272c033b4f..4dce2762eef 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -84,8 +84,14 @@ def _setup_entities(devices, async_add_entities): class VeSyncFanHA(VeSyncDevice, FanEntity): """Representation of a VeSync fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) _attr_name = None + _enable_turn_on_off_backwards_compatibility = False def __init__(self, fan) -> None: """Initialize the VeSync fan device.""" diff --git a/homeassistant/components/vesync/manifest.json 
b/homeassistant/components/vesync/manifest.json index ff3f56dd184..c5926cc224a 100644 --- a/homeassistant/components/vesync/manifest.json +++ b/homeassistant/components/vesync/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/vesync", "iot_class": "cloud_polling", "loggers": ["pyvesync"], - "requirements": ["pyvesync==2.1.10"] + "requirements": ["pyvesync==2.1.12"] } diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 24ab94778e3..8f8ae3c94e3 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -10,6 +10,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, + Platform.FAN, Platform.NUMBER, Platform.SENSOR, Platform.WATER_HEATER, @@ -18,6 +19,7 @@ PLATFORMS = [ UNSUPPORTED_DEVICES = [ "Heatbox1", "Heatbox2_SRC", + "E3_TCU41_x04", "E3_FloorHeatingCircuitChannel", "E3_FloorHeatingCircuitDistributorBox", "E3_RoomControl_One_522", diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py new file mode 100644 index 00000000000..088e54c7354 --- /dev/null +++ b/homeassistant/components/vicare/fan.py @@ -0,0 +1,124 @@ +"""Viessmann ViCare ventilation device.""" + +from __future__ import annotations + +from contextlib import suppress +import logging + +from PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareUtils import ( + PyViCareInvalidDataError, + PyViCareNotSupportedFeatureError, + PyViCareRateLimitError, +) +from PyViCare.PyViCareVentilationDevice import ( + VentilationDevice as PyViCareVentilationDevice, +) +from requests.exceptions import ConnectionError as RequestConnectionError + +from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.percentage import ( + ordered_list_item_to_percentage, + percentage_to_ordered_list_item, +) + +from .const import DEVICE_LIST, DOMAIN +from .entity import ViCareEntity +from .types import VentilationMode, VentilationProgram + +_LOGGER = logging.getLogger(__name__) + +ORDERED_NAMED_FAN_SPEEDS = [ + VentilationProgram.LEVEL_ONE, + VentilationProgram.LEVEL_TWO, + VentilationProgram.LEVEL_THREE, + VentilationProgram.LEVEL_FOUR, +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ViCare fan platform.""" + + device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] + + async_add_entities( + [ + ViCareFan(device.config, device.api) + for device in device_list + if isinstance(device.api, PyViCareVentilationDevice) + ] + ) + + +class ViCareFan(ViCareEntity, FanEntity): + """Representation of the ViCare ventilation device.""" + + _attr_preset_modes = list[str]( + [ + VentilationMode.PERMANENT, + VentilationMode.VENTILATION, + VentilationMode.SENSOR_DRIVEN, + VentilationMode.SENSOR_OVERRIDE, + ] + ) + _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_translation_key = "ventilation" + _enable_turn_on_off_backwards_compatibility = False + + def __init__( + self, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + ) -> None: + """Initialize the fan 
entity.""" + super().__init__(device_config, device, self._attr_translation_key) + + def update(self) -> None: + """Update state of fan.""" + try: + with suppress(PyViCareNotSupportedFeatureError): + self._attr_preset_mode = VentilationMode.from_vicare_mode( + self._api.getActiveMode() + ) + with suppress(PyViCareNotSupportedFeatureError): + self._attr_percentage = ordered_list_item_to_percentage( + ORDERED_NAMED_FAN_SPEEDS, self._api.getActiveProgram() + ) + except RequestConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") + except PyViCareRateLimitError as limit_exception: + _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception) + except PyViCareInvalidDataError as invalid_data_exception: + _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) + + @property + def is_on(self) -> bool | None: + """Return true if the entity is on.""" + # Viessmann ventilation unit cannot be turned off + return True + + def set_percentage(self, percentage: int) -> None: + """Set the speed of the fan, as a percentage.""" + if self._attr_preset_mode != str(VentilationMode.PERMANENT): + self.set_preset_mode(VentilationMode.PERMANENT) + + level = percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, percentage) + _LOGGER.debug("changing ventilation level to %s", level) + self._api.setPermanentLevel(level) + + def set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + target_mode = VentilationMode.to_vicare_mode(preset_mode) + _LOGGER.debug("changing ventilation mode to %s", target_mode) + self._api.setActiveMode(target_mode) diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index de92d0ec271..0452a560cb8 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -65,6 +65,21 @@ "name": "Heating" } }, + "fan": { + "ventilation": { + "name": "Ventilation", + "state_attributes": { + "preset_mode": { + "state": { + "permanent": "permanent", + "ventilation": "schedule", + "sensor_driven": "sensor", + "sensor_override": "schedule with sensor-override" + } + } + } + } + }, "number": { "heating_curve_shift": { "name": "Heating curve shift" @@ -304,8 +319,8 @@ "ess_discharge_total": { "name": "Battery discharge total" }, - "pcc_current_power_exchange": { - "name": "Grid power exchange" + "pcc_transfer_power_exchange": { + "name": "Power exchange with grid" }, "pcc_energy_consumption": { "name": "Energy import from grid" diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 7e1ec7f8bee..596605fccdd 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -64,6 +64,55 @@ VICARE_TO_HA_PRESET_HEATING = { } +class VentilationMode(enum.StrEnum): + """ViCare ventilation modes.""" + + PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) + VENTILATION = "ventilation" # activated by schedule + SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor + SENSOR_OVERRIDE = "sensor_override" # activated by sensor + + @staticmethod + def to_vicare_mode(mode: str | None) -> str | None: + """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" + if mode: + try: + ventilation_mode = VentilationMode(mode) + except ValueError: + # ignore unsupported / unmapped modes + return None + return 
HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None + return None + + @staticmethod + def from_vicare_mode(vicare_mode: str | None) -> str | None: + """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" + for mode in VentilationMode: + if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: + return mode + return None + + +HA_TO_VICARE_MODE_VENTILATION = { + VentilationMode.PERMANENT: "permanent", + VentilationMode.VENTILATION: "ventilation", + VentilationMode.SENSOR_DRIVEN: "sensorDriven", + VentilationMode.SENSOR_OVERRIDE: "sensorOverride", +} + + +class VentilationProgram(enum.StrEnum): + """ViCare preset ventilation programs. + + As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 + """ + + LEVEL_ONE = "levelOne" + LEVEL_TWO = "levelTwo" + LEVEL_THREE = "levelThree" + LEVEL_FOUR = "levelFour" + + @dataclass(frozen=True) class ViCareDevice: """Dataclass holding the device api and config.""" diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 5770d9d2b4a..161e938a3b6 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -21,7 +21,7 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import stt, tts +from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, PipelineEvent, @@ -31,12 +31,14 @@ from homeassistant.components.assist_pipeline import ( async_pipeline_from_audio_stream, select as pipeline_select, ) +from homeassistant.components.assist_pipeline.audio_enhancer import ( + AudioEnhancer, + MicroVadEnhancer, +) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, VadSensitivity, - VoiceActivityDetector, VoiceCommandSegmenter, - WebRtcVad, ) from homeassistant.const import __version__ from homeassistant.core import Context, HomeAssistant @@ -233,13 +235,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - vad = WebRtcVad() + audio_enhancer = MicroVadEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) speech_detected = await self._wait_for_speech( segmenter, - vad, + audio_enhancer, chunk_buffer, ) if not speech_detected: @@ -253,7 +255,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: async for chunk in self._segment_audio( segmenter, - vad, + audio_enhancer, chunk_buffer, ): yield chunk @@ -317,7 +319,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _wait_for_speech( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: MutableSequence[bytes], ): """Buffer audio chunks until speech is detected. 
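For reference, the ViCare fan platform added earlier in this diff exposes the four discrete ventilation programs (levelOne..levelFour) as a percentage-based fan speed. A minimal sketch of that round trip, using the same percentage helpers fan.py imports; the concrete percentages simply reflect how these helpers split the list into equal bands:

from homeassistant.util.percentage import (
    ordered_list_item_to_percentage,
    percentage_to_ordered_list_item,
)

# Same ordering as ORDERED_NAMED_FAN_SPEEDS in vicare/fan.py.
ORDERED_NAMED_FAN_SPEEDS = ["levelOne", "levelTwo", "levelThree", "levelFour"]

# Each program occupies an equal 25% band, so levelTwo reports as 50%...
assert ordered_list_item_to_percentage(ORDERED_NAMED_FAN_SPEEDS, "levelTwo") == 50
# ...and an arbitrary requested percentage snaps to the band it falls into.
assert percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, 40) == "levelTwo"
assert percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, 100) == "levelFour"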
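These voip.py changes drop the WebRTC-based VoiceActivityDetector: speech detection now goes through the assist pipeline's MicroVadEnhancer, and VoiceCommandSegmenter.process_with_vad receives a fixed chunk size plus an is-speech callback instead of a VAD object. A condensed sketch of the new loop, restricted to names that appear in these hunks (the small wrapper functions are only for illustration; the RTP queue, timeouts, and chunk buffering around it are omitted):

from homeassistant.components import assist_pipeline
from homeassistant.components.assist_pipeline.audio_enhancer import MicroVadEnhancer
from homeassistant.components.assist_pipeline.vad import AudioBuffer, VoiceCommandSegmenter

WIDTH = 2  # bytes per 16-bit sample, matching voip.py

segmenter = VoiceCommandSegmenter()
audio_enhancer = MicroVadEnhancer(0, 0, True)  # gain/noise processing off, VAD on
vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH)


def is_speech(samples: bytes) -> bool:
    # The enhancer reports speech per fixed-size chunk.
    return audio_enhancer.enhance_chunk(samples, 0).is_speech is True


def feed(chunk: bytes) -> bool:
    # Returns False once the segmenter decides the voice command has finished.
    return segmenter.process_with_vad(
        chunk, assist_pipeline.SAMPLES_PER_CHUNK, is_speech, vad_buffer
    )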
@@ -329,13 +331,17 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: chunk_buffer.append(chunk) - segmenter.process_with_vad(chunk, vad, vad_buffer) + segmenter.process_with_vad( + chunk, + assist_pipeline.SAMPLES_PER_CHUNK, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, + vad_buffer, + ) if segmenter.in_command: # Buffer until command starts if len(vad_buffer) > 0: @@ -351,7 +357,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _segment_audio( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: Sequence[bytes], ) -> AsyncIterable[bytes]: """Yield audio chunks until voice command has finished.""" @@ -364,11 +370,15 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: - if not segmenter.process_with_vad(chunk, vad, vad_buffer): + if not segmenter.process_with_vad( + chunk, + assist_pipeline.SAMPLES_PER_CHUNK, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, + vad_buffer, + ): # Voice command is finished break @@ -425,13 +435,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): sample_channels = wav_file.getnchannels() if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) ): raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" ) audio_bytes = wav_file.readframes(wav_file.getnframes()) diff --git a/homeassistant/components/wake_on_lan/__init__.py b/homeassistant/components/wake_on_lan/__init__.py index 37837da683a..efd72c4564c 100644 --- a/homeassistant/components/wake_on_lan/__init__.py +++ b/homeassistant/components/wake_on_lan/__init__.py @@ -6,12 +6,13 @@ import logging import voluptuous as vol import wakeonlan +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import DOMAIN +from .const import DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) @@ -43,7 +44,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if broadcast_port is not None: service_kwargs["port"] = broadcast_port - _LOGGER.info( + _LOGGER.debug( "Send magic packet to mac %s (broadcast: %s, port: %s)", mac_address, broadcast_address, @@ -62,3 +63,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a Wake on LAN component entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + 
entry.async_on_unload(entry.add_update_listener(update_listener)) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/wake_on_lan/button.py b/homeassistant/components/wake_on_lan/button.py new file mode 100644 index 00000000000..39c4511868d --- /dev/null +++ b/homeassistant/components/wake_on_lan/button.py @@ -0,0 +1,87 @@ +"""Support for button entity in wake on lan.""" + +from __future__ import annotations + +from functools import partial +import logging +from typing import Any + +import wakeonlan + +from homeassistant.components.button import ButtonEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Wake on LAN button entry.""" + broadcast_address: str | None = entry.options.get(CONF_BROADCAST_ADDRESS) + broadcast_port: int | None = entry.options.get(CONF_BROADCAST_PORT) + mac_address: str = entry.options[CONF_MAC] + name: str = entry.title + + async_add_entities( + [ + WolButton( + name, + mac_address, + broadcast_address, + broadcast_port, + ) + ] + ) + + +class WolButton(ButtonEntity): + """Representation of a wake on lan button.""" + + _attr_name = None + + def __init__( + self, + name: str, + mac_address: str, + broadcast_address: str | None, + broadcast_port: int | None, + ) -> None: + """Initialize the WOL button.""" + self._mac_address = mac_address + self._broadcast_address = broadcast_address + self._broadcast_port = broadcast_port + self._attr_unique_id = dr.format_mac(mac_address) + self._attr_device_info = dr.DeviceInfo( + connections={(dr.CONNECTION_NETWORK_MAC, self._attr_unique_id)}, + identifiers={(DOMAIN, self._attr_unique_id)}, + manufacturer="Wake on LAN", + name=name, + ) + + async def async_press(self) -> None: + """Press the button.""" + service_kwargs: dict[str, Any] = {} + if self._broadcast_address is not None: + service_kwargs["ip_address"] = self._broadcast_address + if self._broadcast_port is not None: + service_kwargs["port"] = self._broadcast_port + + _LOGGER.debug( + "Send magic packet to mac %s (broadcast: %s, port: %s)", + self._mac_address, + self._broadcast_address, + self._broadcast_port, + ) + + await self.hass.async_add_executor_job( + partial(wakeonlan.send_magic_packet, self._mac_address, **service_kwargs) + ) diff --git a/homeassistant/components/wake_on_lan/config_flow.py b/homeassistant/components/wake_on_lan/config_flow.py new file mode 100644 index 00000000000..fb54dd146e5 --- /dev/null +++ b/homeassistant/components/wake_on_lan/config_flow.py @@ -0,0 +1,80 @@ +"""Config flow for Wake on lan integration.""" + +from collections.abc import Mapping +from typing import Any + +import voluptuous as vol + +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from 
homeassistant.helpers import device_registry as dr +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) + +from .const import DEFAULT_NAME, DOMAIN + + +async def validate( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate input setup.""" + user_input = await validate_options(handler, user_input) + + user_input[CONF_MAC] = dr.format_mac(user_input[CONF_MAC]) + + # Mac address needs to be unique + handler.parent_handler._async_abort_entries_match({CONF_MAC: user_input[CONF_MAC]}) # noqa: SLF001 + + return user_input + + +async def validate_options( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate input options.""" + if CONF_BROADCAST_PORT in user_input: + # Convert float to int for broadcast port + user_input[CONF_BROADCAST_PORT] = int(user_input[CONF_BROADCAST_PORT]) + return user_input + + +DATA_SCHEMA = {vol.Required(CONF_MAC): TextSelector()} +OPTIONS_SCHEMA = { + vol.Optional(CONF_BROADCAST_ADDRESS): TextSelector(), + vol.Optional(CONF_BROADCAST_PORT): NumberSelector( + NumberSelectorConfig(min=0, max=65535, step=1, mode=NumberSelectorMode.BOX) + ), +} + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=vol.Schema(DATA_SCHEMA).extend(OPTIONS_SCHEMA), + validate_user_input=validate, + ) +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + vol.Schema(OPTIONS_SCHEMA), validate_user_input=validate_options + ), +} + + +class WakeonLanConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Wake on Lan.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + mac: str = options[CONF_MAC] + return f"{DEFAULT_NAME} {mac}" diff --git a/homeassistant/components/wake_on_lan/const.py b/homeassistant/components/wake_on_lan/const.py index 2560ef40382..20b9573cfde 100644 --- a/homeassistant/components/wake_on_lan/const.py +++ b/homeassistant/components/wake_on_lan/const.py @@ -1,3 +1,11 @@ """Constants for the Wake-On-LAN component.""" +from homeassistant.const import Platform + DOMAIN = "wake_on_lan" +PLATFORMS = [Platform.BUTTON] + +CONF_OFF_ACTION = "turn_off" + +DEFAULT_NAME = "Wake on LAN" +DEFAULT_PING_TIMEOUT = 1 diff --git a/homeassistant/components/wake_on_lan/manifest.json b/homeassistant/components/wake_on_lan/manifest.json index a62980b3010..c716a851ae4 100644 --- a/homeassistant/components/wake_on_lan/manifest.json +++ b/homeassistant/components/wake_on_lan/manifest.json @@ -2,6 +2,7 @@ "domain": "wake_on_lan", "name": "Wake on LAN", "codeowners": ["@ntilley905"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/wake_on_lan", "iot_class": "local_push", "requirements": ["wakeonlan==2.1.0"] diff --git a/homeassistant/components/wake_on_lan/strings.json b/homeassistant/components/wake_on_lan/strings.json index 8395bc7503a..89bc30e405a 100644 --- a/homeassistant/components/wake_on_lan/strings.json +++ b/homeassistant/components/wake_on_lan/strings.json @@ -1,20 +1,56 @@ { + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "step": { + "user": { + "data": { + "mac": "MAC address", + "broadcast_address": "Broadcast address", 
+ "broadcast_port": "Broadcast port" + }, + "data_description": { + "mac": "MAC address of the device to wake up.", + "broadcast_address": "The IP address of the host to send the magic packet to. Defaults to `255.255.255.255` and is normally not changed.", + "broadcast_port": "The port to send the magic packet to. Defaults to `9` and is normally not changed." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + }, + "step": { + "init": { + "data": { + "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", + "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]" + }, + "data_description": { + "broadcast_address": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]", + "broadcast_port": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" + } + } + } + }, "services": { "send_magic_packet": { "name": "Send magic packet", "description": "Sends a 'magic packet' to wake up a device with 'Wake-On-LAN' capabilities.", "fields": { "mac": { - "name": "MAC address", - "description": "MAC address of the device to wake up." + "name": "[%key:component::wake_on_lan::config::step::user::data::mac%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::mac%]" }, "broadcast_address": { - "name": "Broadcast address", - "description": "Broadcast IP where to send the magic packet." + "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_address%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_address%]" }, "broadcast_port": { - "name": "Broadcast port", - "description": "Port where to send the magic packet." 
+ "name": "[%key:component::wake_on_lan::config::step::user::data::broadcast_port%]", + "description": "[%key:component::wake_on_lan::config::step::user::data_description::broadcast_port%]" } } } diff --git a/homeassistant/components/wake_on_lan/switch.py b/homeassistant/components/wake_on_lan/switch.py index cf38d05de38..f4949ec6901 100644 --- a/homeassistant/components/wake_on_lan/switch.py +++ b/homeassistant/components/wake_on_lan/switch.py @@ -27,15 +27,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DOMAIN +from .const import CONF_OFF_ACTION, DEFAULT_NAME, DEFAULT_PING_TIMEOUT, DOMAIN _LOGGER = logging.getLogger(__name__) -CONF_OFF_ACTION = "turn_off" - -DEFAULT_NAME = "Wake on LAN" -DEFAULT_PING_TIMEOUT = 1 - PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { vol.Required(CONF_MAC): cv.string, @@ -48,10 +43,10 @@ PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up a wake on lan switch.""" @@ -62,7 +57,7 @@ def setup_platform( name: str = config[CONF_NAME] off_action: list[Any] | None = config.get(CONF_OFF_ACTION) - add_entities( + async_add_entities( [ WolSwitch( hass, diff --git a/homeassistant/components/wallbox/manifest.json b/homeassistant/components/wallbox/manifest.json index ce9008ef8bb..63102646508 100644 --- a/homeassistant/components/wallbox/manifest.json +++ b/homeassistant/components/wallbox/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/wallbox", "iot_class": "cloud_polling", "loggers": ["wallbox"], - "requirements": ["wallbox==0.6.0"] + "requirements": ["wallbox==0.7.0"] } diff --git a/homeassistant/components/weather/__init__.py b/homeassistant/components/weather/__init__.py index 468c023b470..dab3394426e 100644 --- a/homeassistant/components/weather/__init__.py +++ b/homeassistant/components/weather/__init__.py @@ -922,7 +922,6 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """Start subscription to forecast_type.""" - return None @callback def _async_subscription_ended( @@ -930,7 +929,6 @@ class WeatherEntity(Entity, PostInit, cached_properties=CACHED_PROPERTIES_WITH_A forecast_type: Literal["daily", "hourly", "twice_daily"], ) -> None: """End subscription to forecast_type.""" - return None @final @callback diff --git a/homeassistant/components/weatherflow_cloud/__init__.py b/homeassistant/components/weatherflow_cloud/__init__.py index a40386100e7..8dc26f9b9c6 100644 --- a/homeassistant/components/weatherflow_cloud/__init__.py +++ b/homeassistant/components/weatherflow_cloud/__init__.py @@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant from .const import DOMAIN from .coordinator import WeatherFlowCloudDataUpdateCoordinator -PLATFORMS: list[Platform] = [Platform.WEATHER] +PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WEATHER] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/weatherflow_cloud/coordinator.py b/homeassistant/components/weatherflow_cloud/coordinator.py index 78b4f3be223..8b8a916262f 100644 --- 
a/homeassistant/components/weatherflow_cloud/coordinator.py +++ b/homeassistant/components/weatherflow_cloud/coordinator.py @@ -21,12 +21,11 @@ class WeatherFlowCloudDataUpdateCoordinator( def __init__(self, hass: HomeAssistant, api_token: str) -> None: """Initialize global WeatherFlow forecast data updater.""" self.weather_api = WeatherFlowRestAPI(api_token=api_token) - super().__init__( hass, LOGGER, name=DOMAIN, - update_interval=timedelta(minutes=15), + update_interval=timedelta(seconds=60), ) async def _async_update_data(self) -> dict[int, WeatherFlowDataREST]: diff --git a/homeassistant/components/weatherflow_cloud/entity.py b/homeassistant/components/weatherflow_cloud/entity.py new file mode 100644 index 00000000000..46077ab0870 --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/entity.py @@ -0,0 +1,38 @@ +"""Base entity class for WeatherFlow Cloud integration.""" + +from weatherflow4py.models.rest.unified import WeatherFlowDataREST + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER +from .coordinator import WeatherFlowCloudDataUpdateCoordinator + + +class WeatherFlowCloudEntity(CoordinatorEntity[WeatherFlowCloudDataUpdateCoordinator]): + """Base entity class to use for everything.""" + + _attr_attribution = ATTR_ATTRIBUTION + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WeatherFlowCloudDataUpdateCoordinator, + station_id: int, + ) -> None: + """Class initializer.""" + super().__init__(coordinator) + self.station_id = station_id + + self._attr_device_info = DeviceInfo( + name=self.station.station.name, + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, str(station_id))}, + manufacturer=MANUFACTURER, + configuration_url=f"https://tempestwx.com/station/{station_id}/grid", + ) + + @property + def station(self) -> WeatherFlowDataREST: + """Individual Station data.""" + return self.coordinator.data[self.station_id] diff --git a/homeassistant/components/weatherflow_cloud/icons.json b/homeassistant/components/weatherflow_cloud/icons.json new file mode 100644 index 00000000000..19e6ac56821 --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/icons.json @@ -0,0 +1,42 @@ +{ + "entity": { + "sensor": { + "air_temperature": { + "default": "mdi:thermometer" + }, + "air_density": { + "default": "mdi:format-line-weight" + }, + "feels_like": { + "default": "mdi:thermometer" + }, + "heat_index": { + "default": "mdi:sun-thermometer" + }, + "wet_bulb_temperature": { + "default": "mdi:thermometer-water" + }, + "wet_bulb_globe_temperature": { + "default": "mdi:thermometer-water" + }, + "lightning_strike_count": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_count_last_1hr": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_count_last_3hr": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_last_distance": { + "default": "mdi:lightning-bolt" + }, + "lightning_strike_last_epoch": { + "default": "mdi:lightning-bolt" + }, + "wind_chill": { + "default": "mdi:snowflake-thermometer" + } + } + } +} diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 93df04d833c..354b9642c06 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -5,5 +5,6 @@ "config_flow": true, "documentation": 
"https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", + "loggers": ["weatherflow4py"], "requirements": ["weatherflow4py==0.2.21"] } diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py new file mode 100644 index 00000000000..9314c77a65c --- /dev/null +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -0,0 +1,208 @@ +"""Sensors for cloud based weatherflow.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import UTC, datetime + +from weatherflow4py.models.rest.observation import Observation + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import UnitOfLength, UnitOfPressure, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from .const import DOMAIN +from .coordinator import WeatherFlowCloudDataUpdateCoordinator +from .entity import WeatherFlowCloudEntity + + +@dataclass(frozen=True, kw_only=True) +class WeatherFlowCloudSensorEntityDescription( + SensorEntityDescription, +): + """Describes a weatherflow sensor.""" + + value_fn: Callable[[Observation], StateType | datetime] + + +WF_SENSORS: tuple[WeatherFlowCloudSensorEntityDescription, ...] = ( + # Air Sensors + WeatherFlowCloudSensorEntityDescription( + key="air_density", + translation_key="air_density", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=5, + value_fn=lambda data: data.air_density, + native_unit_of_measurement="kg/m³", + ), + # Temp Sensors + WeatherFlowCloudSensorEntityDescription( + key="air_temperature", + translation_key="air_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.air_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="dew_point", + translation_key="dew_point", + value_fn=lambda data: data.dew_point, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + ), + WeatherFlowCloudSensorEntityDescription( + key="feels_like", + translation_key="feels_like", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.feels_like, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="heat_index", + translation_key="heat_index", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.heat_index, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wind_chill", + translation_key="wind_chill", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wind_chill, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wet_bulb_temperature", + 
translation_key="wet_bulb_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wet_bulb_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + WeatherFlowCloudSensorEntityDescription( + key="wet_bulb_globe_temperature", + translation_key="wet_bulb_globe_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda data: data.wet_bulb_globe_temperature, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + # Pressure Sensors + WeatherFlowCloudSensorEntityDescription( + key="barometric_pressure", + translation_key="barometric_pressure", + value_fn=lambda data: data.barometric_pressure, + native_unit_of_measurement=UnitOfPressure.MBAR, + device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=3, + ), + WeatherFlowCloudSensorEntityDescription( + key="sea_level_pressure", + translation_key="sea_level_pressure", + value_fn=lambda data: data.sea_level_pressure, + native_unit_of_measurement=UnitOfPressure.MBAR, + device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=3, + ), + # Lightning Sensors + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count", + translation_key="lightning_strike_count", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count_last_1hr", + translation_key="lightning_strike_count_last_1hr", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count_last_1hr, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_count_last_3hr", + translation_key="lightning_strike_count_last_3hr", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.lightning_strike_count_last_3hr, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_last_distance", + translation_key="lightning_strike_last_distance", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.KILOMETERS, + value_fn=lambda data: data.lightning_strike_last_distance, + ), + WeatherFlowCloudSensorEntityDescription( + key="lightning_strike_last_epoch", + translation_key="lightning_strike_last_epoch", + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: datetime.fromtimestamp( + data.lightning_strike_last_epoch, tz=UTC + ), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up WeatherFlow sensors based on a config entry.""" + + coordinator: WeatherFlowCloudDataUpdateCoordinator = hass.data[DOMAIN][ + entry.entry_id + ] + + stations = coordinator.data.keys() + + async_add_entities( + WeatherFlowCloudSensor(coordinator, sensor_description, station_id) + for station_id in stations + for sensor_description in WF_SENSORS + ) + + +class WeatherFlowCloudSensor(WeatherFlowCloudEntity, SensorEntity): + """Implementation of a WeatherFlow sensor.""" + + entity_description: WeatherFlowCloudSensorEntityDescription + + def __init__( + self, + coordinator: WeatherFlowCloudDataUpdateCoordinator, + description: WeatherFlowCloudSensorEntityDescription, + station_id: int, + ) -> None: + 
"""Initialize the sensor.""" + # Initialize the Entity Class + super().__init__(coordinator, station_id) + self.entity_description = description + self._attr_unique_id = f"{station_id}_{description.key}" + + @property + def native_value(self) -> StateType | datetime: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.station.observation.obs[0]) diff --git a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index 782b0dcf960..df561c8b753 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -23,5 +23,65 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } + }, + "entity": { + "sensor": { + "air_density": { + "name": "Air density" + }, + "barometric_pressure": { + "name": "Pressure barometric" + }, + "sea_level_pressure": { + "name": "Pressure sea level" + }, + + "dew_point": { + "name": "Dew point" + }, + "lightning_strike_count": { + "name": "Lightning count" + }, + "lightning_strike_count_last_1hr": { + "name": "Lightning count last 1 hr" + }, + "lightning_strike_count_last_3hr": { + "name": "Lightning count last 3 hr" + }, + "lightning_strike_last_distance": { + "name": "Lightning last distance" + }, + "lightning_strike_last_epoch": { + "name": "Lightning last strike" + }, + + "wind_chill": { + "name": "Wind chill" + }, + "wind_direction": { + "name": "Wind direction" + }, + "wind_direction_cardinal": { + "name": "Wind direction (cardinal)" + }, + "wind_gust": { + "name": "Wind gust" + }, + "wind_lull": { + "name": "Wind lull" + }, + "feels_like": { + "name": "Feels like" + }, + "heat_index": { + "name": "Heat index" + }, + "wet_bulb_temperature": { + "name": "Wet bulb temperature" + }, + "wet_bulb_globe_temperature": { + "name": "Wet bulb globe temperature" + } + } } } diff --git a/homeassistant/components/weatherflow_cloud/weather.py b/homeassistant/components/weatherflow_cloud/weather.py index 47e2b6a28df..c475f2974a9 100644 --- a/homeassistant/components/weatherflow_cloud/weather.py +++ b/homeassistant/components/weatherflow_cloud/weather.py @@ -17,11 +17,11 @@ from homeassistant.const import ( UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import ATTR_ATTRIBUTION, DOMAIN, MANUFACTURER, STATE_MAP +from .const import DOMAIN, STATE_MAP from .coordinator import WeatherFlowCloudDataUpdateCoordinator +from .entity import WeatherFlowCloudEntity async def async_setup_entry( @@ -43,13 +43,11 @@ async def async_setup_entry( class WeatherFlowWeather( - SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator] + WeatherFlowCloudEntity, + SingleCoordinatorWeatherEntity[WeatherFlowCloudDataUpdateCoordinator], ): """Implementation of a WeatherFlow weather condition.""" - _attr_attribution = ATTR_ATTRIBUTION - _attr_has_entity_name = True - _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS _attr_native_pressure_unit = UnitOfPressure.MBAR @@ -65,19 +63,9 @@ class WeatherFlowWeather( station_id: int, ) -> None: """Initialise the platform with a data instance and station.""" - super().__init__(coordinator) - - self.station_id = 
station_id + super().__init__(coordinator, station_id) self._attr_unique_id = f"weatherflow_forecast_{station_id}" - self._attr_device_info = DeviceInfo( - name=self.local_data.station.name, - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"{station_id}")}, - manufacturer=MANUFACTURER, - configuration_url=f"https://tempestwx.com/station/{station_id}/grid", - ) - @property def local_data(self) -> WeatherFlowDataREST: """Return the local weather data object for this station.""" @@ -98,7 +86,6 @@ class WeatherFlowWeather( """Return the Air Pressure @ Station.""" return self.local_data.weather.current_conditions.station_pressure - # @property def humidity(self) -> float | None: """Return the humidity.""" diff --git a/homeassistant/components/webhook/manifest.json b/homeassistant/components/webhook/manifest.json index c2795e8ac17..43f5321d9f6 100644 --- a/homeassistant/components/webhook/manifest.json +++ b/homeassistant/components/webhook/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/webhook", + "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/components/webmin/config_flow.py b/homeassistant/components/webmin/config_flow.py index 5fa3aefb048..3f55bbd9110 100644 --- a/homeassistant/components/webmin/config_flow.py +++ b/homeassistant/components/webmin/config_flow.py @@ -53,9 +53,10 @@ async def validate_user_input( except Exception as err: raise SchemaFlowError("unknown") from err - await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( - get_sorted_mac_addresses(data)[0] - ) + if len(mac_addresses := get_sorted_mac_addresses(data)) > 0: + await cast(SchemaConfigFlowHandler, handler.parent_handler).async_set_unique_id( + mac_addresses[0] + ) return user_input diff --git a/homeassistant/components/webmin/coordinator.py b/homeassistant/components/webmin/coordinator.py index dab5e495c1a..45261787e75 100644 --- a/homeassistant/components/webmin/coordinator.py +++ b/homeassistant/components/webmin/coordinator.py @@ -23,6 +23,7 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """The Webmin data update coordinator.""" mac_address: str + unique_id: str def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the Webmin data update coordinator.""" @@ -41,14 +42,19 @@ class WebminUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def async_setup(self) -> None: """Provide needed data to the device info.""" mac_addresses = get_sorted_mac_addresses(self.data) - self.mac_address = mac_addresses[0] - self.device_info[ATTR_CONNECTIONS] = { - (CONNECTION_NETWORK_MAC, format_mac(mac_address)) - for mac_address in mac_addresses - } - self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses - } + if len(mac_addresses) > 0: + self.mac_address = mac_addresses[0] + self.unique_id = self.mac_address + self.device_info[ATTR_CONNECTIONS] = { + (CONNECTION_NETWORK_MAC, format_mac(mac_address)) + for mac_address in mac_addresses + } + self.device_info[ATTR_IDENTIFIERS] = { + (DOMAIN, format_mac(mac_address)) for mac_address in mac_addresses + } + else: + assert self.config_entry + self.unique_id = self.config_entry.entry_id async def _async_update_data(self) -> dict[str, Any]: data = await self.instance.update() diff --git a/homeassistant/components/webmin/sensor.py b/homeassistant/components/webmin/sensor.py index 
cf1a9845c02..785140393a2 100644 --- a/homeassistant/components/webmin/sensor.py +++ b/homeassistant/components/webmin/sensor.py @@ -235,7 +235,7 @@ class WebminSensor(CoordinatorEntity[WebminUpdateCoordinator], SensorEntity): super().__init__(coordinator) self.entity_description = description self._attr_device_info = coordinator.device_info - self._attr_unique_id = f"{coordinator.mac_address}_{description.key}" + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" @property def native_value(self) -> int | float: diff --git a/homeassistant/components/websocket_api/decorators.py b/homeassistant/components/websocket_api/decorators.py index b9924bc91d1..2c8a6cc02f1 100644 --- a/homeassistant/components/websocket_api/decorators.py +++ b/homeassistant/components/websocket_api/decorators.py @@ -145,7 +145,7 @@ def websocket_command( def decorate(func: const.WebSocketCommandHandler) -> const.WebSocketCommandHandler: """Decorate ws command function.""" - if is_dict and len(schema) == 1: # type only empty schema + if is_dict and len(schema) == 1: # type: ignore[arg-type] # type only empty schema func._ws_schema = False # type: ignore[attr-defined] # noqa: SLF001 elif is_dict: func._ws_schema = messages.BASE_COMMAND_MESSAGE_SCHEMA.extend(schema) # type: ignore[attr-defined] # noqa: SLF001 diff --git a/homeassistant/components/websocket_api/manifest.json b/homeassistant/components/websocket_api/manifest.json index 116bd0ccee8..315411ea4cf 100644 --- a/homeassistant/components/websocket_api/manifest.json +++ b/homeassistant/components/websocket_api/manifest.json @@ -1,7 +1,6 @@ { "domain": "websocket_api", "name": "Home Assistant WebSocket API", - "after_dependencies": ["recorder"], "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/websocket_api", diff --git a/homeassistant/components/websocket_api/strings.json b/homeassistant/components/websocket_api/strings.json index 10b95637b6b..afef732b8f5 100644 --- a/homeassistant/components/websocket_api/strings.json +++ b/homeassistant/components/websocket_api/strings.json @@ -1,7 +1,7 @@ { "exceptions": { "child_service_not_found": { - "message": "Service {domain}.{service} called service {child_domain}.{child_service} which was not found." + "message": "Action {domain}.{service} uses action {child_domain}.{child_service} which was not found." 
} } } diff --git a/homeassistant/components/wemo/entity.py b/homeassistant/components/wemo/entity.py index db64aa3137e..16ab3ae1173 100644 --- a/homeassistant/components/wemo/entity.py +++ b/homeassistant/components/wemo/entity.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator import contextlib import logging from pywemo.exceptions import ActionException -from typing_extensions import Generator from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index e1b9aaf2388..b7c9840bcdc 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -74,9 +74,14 @@ async def async_setup_entry( class WemoHumidifier(WemoBinaryStateEntity, FanEntity): """Representation of a WeMo humidifier.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) wemo: Humidifier _last_fan_on_mode: FanMode + _enable_turn_on_off_backwards_compatibility = False def __init__(self, coordinator: DeviceCoordinator) -> None: """Initialize the WeMo switch.""" diff --git a/homeassistant/components/wiffi/binary_sensor.py b/homeassistant/components/wiffi/binary_sensor.py index 23aebd122f2..80088f373b4 100644 --- a/homeassistant/components/wiffi/binary_sensor.py +++ b/homeassistant/components/wiffi/binary_sensor.py @@ -17,7 +17,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setup has been called + Called by the HA framework after async_forward_entry_setups has been called during initialization of a new integration (= wiffi). """ diff --git a/homeassistant/components/wiffi/sensor.py b/homeassistant/components/wiffi/sensor.py index 7b64628085a..cf8cf8719c3 100644 --- a/homeassistant/components/wiffi/sensor.py +++ b/homeassistant/components/wiffi/sensor.py @@ -45,7 +45,7 @@ async def async_setup_entry( ) -> None: """Set up platform for a new integration. - Called by the HA framework after async_forward_entry_setup has been called + Called by the HA framework after async_forward_entry_setups has been called during initialization of a new integration (= wiffi). 
""" diff --git a/homeassistant/components/wilight/fan.py b/homeassistant/components/wilight/fan.py index 5c05575c4f8..71559658c35 100644 --- a/homeassistant/components/wilight/fan.py +++ b/homeassistant/components/wilight/fan.py @@ -57,7 +57,13 @@ class WiLightFan(WiLightDevice, FanEntity): _attr_name = None _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + ) + _enable_turn_on_off_backwards_compatibility = False def __init__(self, api_device: PyWiLightDevice, index: str, item_name: str) -> None: """Initialize the device.""" diff --git a/homeassistant/components/wiz/__init__.py b/homeassistant/components/wiz/__init__.py index 79c317f178b..1bf3188e9e9 100644 --- a/homeassistant/components/wiz/__init__.py +++ b/homeassistant/components/wiz/__init__.py @@ -31,6 +31,8 @@ from .const import ( from .discovery import async_discover_devices, async_trigger_discovery from .models import WizData +type WizConfigEntry = ConfigEntry[WizData] + _LOGGER = logging.getLogger(__name__) PLATFORMS = [ @@ -135,9 +137,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await bulb.start_push(_async_push_update) bulb.set_discovery_callback(lambda bulb: async_trigger_discovery(hass, [bulb])) - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = WizData( - coordinator=coordinator, bulb=bulb, scenes=scenes - ) + entry.runtime_data = WizData(coordinator=coordinator, bulb=bulb, scenes=scenes) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) @@ -147,6 +147,5 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - data: WizData = hass.data[DOMAIN].pop(entry.entry_id) - await data.bulb.async_close() + await entry.runtime_data.bulb.async_close() return unload_ok diff --git a/homeassistant/components/wiz/binary_sensor.py b/homeassistant/components/wiz/binary_sensor.py index b58e120a9dd..3411ee200b9 100644 --- a/homeassistant/components/wiz/binary_sensor.py +++ b/homeassistant/components/wiz/binary_sensor.py @@ -10,13 +10,13 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import WizConfigEntry from .const import DOMAIN, SIGNAL_WIZ_PIR from .entity import WizEntity from .models import WizData @@ -26,17 +26,16 @@ OCCUPANCY_UNIQUE_ID = "{}_occupancy" async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ binary sensor platform.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - mac = wiz_data.bulb.mac + mac = entry.runtime_data.bulb.mac if er.async_get(hass).async_get_entity_id( Platform.BINARY_SENSOR, DOMAIN, OCCUPANCY_UNIQUE_ID.format(mac) ): - async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) + async_add_entities([WizOccupancyEntity(entry.runtime_data, entry.title)]) return cancel_dispatcher: Callable[[], None] | None = None @@ -47,7 +46,7 @@ async def async_setup_entry( assert cancel_dispatcher is not None cancel_dispatcher() cancel_dispatcher = None - async_add_entities([WizOccupancyEntity(wiz_data, entry.title)]) + async_add_entities([WizOccupancyEntity(entry.runtime_data, entry.title)]) cancel_dispatcher = async_dispatcher_connect( hass, SIGNAL_WIZ_PIR.format(mac), _async_add_occupancy_sensor diff --git a/homeassistant/components/wiz/diagnostics.py b/homeassistant/components/wiz/diagnostics.py index 5f617ebafe9..c58751c7fc0 100644 --- a/homeassistant/components/wiz/diagnostics.py +++ b/homeassistant/components/wiz/diagnostics.py @@ -5,24 +5,21 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .models import WizData +from . import WizConfigEntry TO_REDACT = {"roomId", "homeId"} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: WizConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] return { "entry": { "title": entry.title, "data": dict(entry.data), }, - "data": async_redact_data(wiz_data.bulb.diagnostics, TO_REDACT), + "data": async_redact_data(entry.runtime_data.bulb.diagnostics, TO_REDACT), } diff --git a/homeassistant/components/wiz/light.py b/homeassistant/components/wiz/light.py index aece184720d..a3f36d580d2 100644 --- a/homeassistant/components/wiz/light.py +++ b/homeassistant/components/wiz/light.py @@ -19,7 +19,6 @@ from homeassistant.components.light import ( LightEntityFeature, filter_supported_color_modes, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.color import ( @@ -27,7 +26,7 @@ from homeassistant.util.color import ( color_temperature_mired_to_kelvin, ) -from .const import DOMAIN +from . 
import WizConfigEntry from .entity import WizToggleEntity from .models import WizData @@ -61,13 +60,12 @@ def _async_pilot_builder(**kwargs: Any) -> PilotBuilder: async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ Platform from config_flow.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - if wiz_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: - async_add_entities([WizBulbEntity(wiz_data, entry.title)]) + if entry.runtime_data.bulb.bulbtype.bulb_type != BulbClass.SOCKET: + async_add_entities([WizBulbEntity(entry.runtime_data, entry.title)]) class WizBulbEntity(WizToggleEntity, LightEntity): diff --git a/homeassistant/components/wiz/number.py b/homeassistant/components/wiz/number.py index 46708ac001e..0591e854d7d 100644 --- a/homeassistant/components/wiz/number.py +++ b/homeassistant/components/wiz/number.py @@ -13,12 +13,11 @@ from homeassistant.components.number import ( NumberEntityDescription, NumberMode, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import WizConfigEntry from .entity import WizEntity from .models import WizData @@ -68,15 +67,16 @@ NUMBERS: tuple[WizNumberEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz speed number.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] async_add_entities( - WizSpeedNumber(wiz_data, entry.title, description) + WizSpeedNumber(entry.runtime_data, entry.title, description) for description in NUMBERS - if getattr(wiz_data.bulb.bulbtype.features, description.required_feature) + if getattr( + entry.runtime_data.bulb.bulbtype.features, description.required_feature + ) ) diff --git a/homeassistant/components/wiz/sensor.py b/homeassistant/components/wiz/sensor.py index aae443e60d0..eb77686a5cf 100644 --- a/homeassistant/components/wiz/sensor.py +++ b/homeassistant/components/wiz/sensor.py @@ -8,7 +8,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -17,7 +16,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import WizConfigEntry from .entity import WizEntity from .models import WizData @@ -45,18 +44,18 @@ POWER_SENSORS: tuple[SensorEntityDescription, ...] 
= ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the wiz sensor.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] entities = [ - WizSensor(wiz_data, entry.title, description) for description in SENSORS + WizSensor(entry.runtime_data, entry.title, description) + for description in SENSORS ] - if wiz_data.coordinator.data is not None: + if entry.runtime_data.coordinator.data is not None: entities.extend( [ - WizPowerSensor(wiz_data, entry.title, description) + WizPowerSensor(entry.runtime_data, entry.title, description) for description in POWER_SENSORS ] ) diff --git a/homeassistant/components/wiz/switch.py b/homeassistant/components/wiz/switch.py index d94bf12da9f..4c089d2d6d2 100644 --- a/homeassistant/components/wiz/switch.py +++ b/homeassistant/components/wiz/switch.py @@ -8,24 +8,22 @@ from pywizlight import PilotBuilder from pywizlight.bulblibrary import BulbClass from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import WizConfigEntry from .entity import WizToggleEntity from .models import WizData async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: WizConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the WiZ switch platform.""" - wiz_data: WizData = hass.data[DOMAIN][entry.entry_id] - if wiz_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: - async_add_entities([WizSocketEntity(wiz_data, entry.title)]) + if entry.runtime_data.bulb.bulbtype.bulb_type == BulbClass.SOCKET: + async_add_entities([WizSocketEntity(entry.runtime_data, entry.title)]) class WizSocketEntity(WizToggleEntity, SwitchEntity): diff --git a/homeassistant/components/wled/__init__.py b/homeassistant/components/wled/__init__.py index ba87fb58122..b4834347694 100644 --- a/homeassistant/components/wled/__init__.py +++ b/homeassistant/components/wled/__init__.py @@ -5,9 +5,12 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.typing import ConfigType +from homeassistant.util.hass_dict import HassKey -from .const import LOGGER -from .coordinator import WLEDDataUpdateCoordinator +from .const import DOMAIN +from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator PLATFORMS = ( Platform.BUTTON, @@ -21,23 +24,26 @@ PLATFORMS = ( type WLEDConfigEntry = ConfigEntry[WLEDDataUpdateCoordinator] +WLED_KEY: HassKey[WLEDReleasesDataUpdateCoordinator] = HassKey(DOMAIN) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up the WLED integration. + + We set up a single coordinator for fetching WLED releases, which + is used across all WLED devices (and config entries) to avoid + fetching the same data multiple times for each. 
+ """ + hass.data[WLED_KEY] = WLEDReleasesDataUpdateCoordinator(hass) + await hass.data[WLED_KEY].async_request_refresh() + return True + async def async_setup_entry(hass: HomeAssistant, entry: WLEDConfigEntry) -> bool: """Set up WLED from a config entry.""" - coordinator = WLEDDataUpdateCoordinator(hass, entry=entry) - await coordinator.async_config_entry_first_refresh() - - if coordinator.data.info.leds.cct: - LOGGER.error( - ( - "WLED device '%s' has a CCT channel, which is not supported by " - "this integration" - ), - entry.title, - ) - return False - - entry.runtime_data = coordinator + entry.runtime_data = WLEDDataUpdateCoordinator(hass, entry=entry) + await entry.runtime_data.async_config_entry_first_refresh() # Set up all platforms for this device/entry. await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index c40753b686a..7853ad2101e 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -46,8 +46,6 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: errors["base"] = "cannot_connect" else: - if device.info.leds.cct: - return self.async_abort(reason="cct_unsupported") await self.async_set_unique_id(device.info.mac_address) self._abort_if_unique_id_configured( updates={CONF_HOST: user_input[CONF_HOST]} @@ -84,9 +82,6 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: return self.async_abort(reason="cannot_connect") - if self.discovered_device.info.leds.cct: - return self.async_abort(reason="cct_unsupported") - await self.async_set_unique_id(self.discovered_device.info.mac_address) self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host}) diff --git a/homeassistant/components/wled/const.py b/homeassistant/components/wled/const.py index f698347537c..69ff6ccb1fa 100644 --- a/homeassistant/components/wled/const.py +++ b/homeassistant/components/wled/const.py @@ -3,17 +3,23 @@ from datetime import timedelta import logging +from wled import LightCapability + +from homeassistant.components.light import ColorMode + # Integration domain DOMAIN = "wled" LOGGER = logging.getLogger(__package__) SCAN_INTERVAL = timedelta(seconds=10) +RELEASES_SCAN_INTERVAL = timedelta(hours=3) # Options CONF_KEEP_MAIN_LIGHT = "keep_master_light" DEFAULT_KEEP_MAIN_LIGHT = False # Attributes +ATTR_CCT = "cct" ATTR_COLOR_PRIMARY = "color_primary" ATTR_DURATION = "duration" ATTR_FADE = "fade" @@ -24,3 +30,76 @@ ATTR_SOFTWARE_VERSION = "sw_version" ATTR_SPEED = "speed" ATTR_TARGET_BRIGHTNESS = "target_brightness" ATTR_UDP_PORT = "udp_port" + +# Static values +COLOR_TEMP_K_MIN = 2000 +COLOR_TEMP_K_MAX = 6535 + + +LIGHT_CAPABILITIES_COLOR_MODE_MAPPING: dict[LightCapability, list[ColorMode]] = { + LightCapability.NONE: [ + ColorMode.ONOFF, + ], + LightCapability.RGB_COLOR: [ + ColorMode.RGB, + ], + LightCapability.WHITE_CHANNEL: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR | LightCapability.WHITE_CHANNEL: [ + ColorMode.RGBW, + ], + LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ], + LightCapability.RGB_COLOR | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.RGBWW, + ], + LightCapability.WHITE_CHANNEL | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE: [ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ], + 
LightCapability.MANUAL_WHITE: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ], + LightCapability.WHITE_CHANNEL | LightCapability.MANUAL_WHITE: [ + ColorMode.BRIGHTNESS, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.WHITE, + ], + LightCapability.COLOR_TEMPERATURE | LightCapability.MANUAL_WHITE: [ + ColorMode.COLOR_TEMP, + ColorMode.WHITE, + ], + LightCapability.RGB_COLOR + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.COLOR_TEMP, + ], + LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.COLOR_TEMP, + ColorMode.WHITE, + ], + LightCapability.RGB_COLOR + | LightCapability.WHITE_CHANNEL + | LightCapability.COLOR_TEMPERATURE + | LightCapability.MANUAL_WHITE: [ + ColorMode.RGBW, + ColorMode.COLOR_TEMP, + ], +} diff --git a/homeassistant/components/wled/coordinator.py b/homeassistant/components/wled/coordinator.py index f6219c63cb8..cb39fde5e5a 100644 --- a/homeassistant/components/wled/coordinator.py +++ b/homeassistant/components/wled/coordinator.py @@ -2,7 +2,14 @@ from __future__ import annotations -from wled import WLED, Device as WLEDDevice, WLEDConnectionClosedError, WLEDError +from wled import ( + WLED, + Device as WLEDDevice, + Releases, + WLEDConnectionClosedError, + WLEDError, + WLEDReleases, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP @@ -15,6 +22,7 @@ from .const import ( DEFAULT_KEEP_MAIN_LIGHT, DOMAIN, LOGGER, + RELEASES_SCAN_INTERVAL, SCAN_INTERVAL, ) @@ -101,17 +109,37 @@ class WLEDDataUpdateCoordinator(DataUpdateCoordinator[WLEDDevice]): async def _async_update_data(self) -> WLEDDevice: """Fetch data from WLED.""" try: - device = await self.wled.update(full_update=not self.last_update_success) + device = await self.wled.update() except WLEDError as error: raise UpdateFailed(f"Invalid response from API: {error}") from error # If the device supports a WebSocket, try activating it. 
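LIGHT_CAPABILITIES_COLOR_MODE_MAPPING keys on bitwise combinations of LightCapability flags, and the segment light (further down in light.py) picks its supported color modes by looking up the capability value reported for its segment. A self-contained sketch of that flag-keyed lookup; the enum and mode names below are illustrative stand-ins, not the real wled or Home Assistant ones:

from enum import IntFlag


class Capability(IntFlag):
    NONE = 0
    RGB_COLOR = 1
    WHITE_CHANNEL = 2
    COLOR_TEMPERATURE = 4
    MANUAL_WHITE = 8


COLOR_MODES: dict[Capability, list[str]] = {
    Capability.NONE: ["onoff"],
    Capability.RGB_COLOR: ["rgb"],
    Capability.RGB_COLOR | Capability.WHITE_CHANNEL: ["rgbw"],
}

reported = Capability.RGB_COLOR | Capability.WHITE_CHANNEL  # from device info
print(COLOR_MODES.get(reported, ["onoff"]))  # ['rgbw']

Because OR-combinations of a flag enum are themselves single hashable members, every capability combination can be spelled out explicitly as a dictionary key, which is exactly how the mapping above enumerates the supported color-mode sets.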
if ( device.info.websocket is not None - and device.info.leds.cct is not True and not self.wled.connected and not self.unsub ): self._use_websocket() return device + + +class WLEDReleasesDataUpdateCoordinator(DataUpdateCoordinator[Releases]): + """Class to manage fetching WLED releases.""" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize global WLED releases updater.""" + self.wled = WLEDReleases(session=async_get_clientsession(hass)) + super().__init__( + hass, + LOGGER, + name=DOMAIN, + update_interval=RELEASES_SCAN_INTERVAL, + ) + + async def _async_update_data(self) -> Releases: + """Fetch release data from WLED.""" + try: + return await self.wled.releases() + except WLEDError as error: + raise UpdateFailed(f"Invalid response from GitHub API: {error}") from error diff --git a/homeassistant/components/wled/diagnostics.py b/homeassistant/components/wled/diagnostics.py index e81760e0f72..732cd3602a0 100644 --- a/homeassistant/components/wled/diagnostics.py +++ b/homeassistant/components/wled/diagnostics.py @@ -17,31 +17,23 @@ async def async_get_config_entry_diagnostics( coordinator = entry.runtime_data data: dict[str, Any] = { - "info": async_redact_data(coordinator.data.info.__dict__, "wifi"), - "state": coordinator.data.state.__dict__, + "info": async_redact_data(coordinator.data.info.to_dict(), "wifi"), + "state": coordinator.data.state.to_dict(), "effects": { - effect.effect_id: effect.name for effect in coordinator.data.effects + effect.effect_id: effect.name + for effect in coordinator.data.effects.values() }, "palettes": { - palette.palette_id: palette.name for palette in coordinator.data.palettes + palette.palette_id: palette.name + for palette in coordinator.data.palettes.values() }, "playlists": { - playlist.playlist_id: { - "name": playlist.name, - "repeat": playlist.repeat, - "shuffle": playlist.shuffle, - "end": playlist.end.preset_id if playlist.end else None, - } - for playlist in coordinator.data.playlists + playlist.playlist_id: playlist.name + for playlist in coordinator.data.playlists.values() }, "presets": { - preset.preset_id: { - "name": preset.name, - "quick_label": preset.quick_label, - "on": preset.on, - "transition": preset.transition, - } - for preset in coordinator.data.presets + preset.preset_id: preset.name + for preset in coordinator.data.presets.values() }, } return data diff --git a/homeassistant/components/wled/helpers.py b/homeassistant/components/wled/helpers.py index 0dd29fdc2a3..216dba67c94 100644 --- a/homeassistant/components/wled/helpers.py +++ b/homeassistant/components/wled/helpers.py @@ -35,3 +35,13 @@ def wled_exception_handler[_WLEDEntityT: WLEDEntity, **_P]( raise HomeAssistantError("Invalid response from WLED API") from error return handler + + +def kelvin_to_255(k: int, min_k: int, max_k: int) -> int: + """Map color temperature in K from minK-maxK to 0-255.""" + return int((k - min_k) / (max_k - min_k) * 255) + + +def kelvin_to_255_reverse(v: int, min_k: int, max_k: int) -> int: + """Map color temperature from 0-255 to minK-maxK K.""" + return int(v / 255 * (max_k - min_k) + min_k) diff --git a/homeassistant/components/wled/light.py b/homeassistant/components/wled/light.py index 36ebd024de3..b4edf10dc58 100644 --- a/homeassistant/components/wled/light.py +++ b/homeassistant/components/wled/light.py @@ -7,6 +7,7 @@ from typing import Any, cast from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, @@ -19,10 +20,18 @@ from homeassistant.core 
import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import WLEDConfigEntry -from .const import ATTR_COLOR_PRIMARY, ATTR_ON, ATTR_SEGMENT_ID +from .const import ( + ATTR_CCT, + ATTR_COLOR_PRIMARY, + ATTR_ON, + ATTR_SEGMENT_ID, + COLOR_TEMP_K_MAX, + COLOR_TEMP_K_MIN, + LIGHT_CAPABILITIES_COLOR_MODE_MAPPING, +) from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity -from .helpers import wled_exception_handler +from .helpers import kelvin_to_255, kelvin_to_255_reverse, wled_exception_handler PARALLEL_UPDATES = 1 @@ -104,6 +113,8 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): _attr_supported_features = LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION _attr_translation_key = "segment" + _attr_min_color_temp_kelvin = COLOR_TEMP_K_MIN + _attr_max_color_temp_kelvin = COLOR_TEMP_K_MAX def __init__( self, @@ -112,8 +123,6 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): ) -> None: """Initialize WLED segment light.""" super().__init__(coordinator=coordinator) - self._rgbw = coordinator.data.info.leds.rgbw - self._wv = coordinator.data.info.leds.wv self._segment = segment # Segment 0 uses a simpler name, which is more natural for when using @@ -127,18 +136,24 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): f"{self.coordinator.data.info.mac_address}_{self._segment}" ) - self._attr_color_mode = ColorMode.RGB - self._attr_supported_color_modes = {ColorMode.RGB} - if self._rgbw and self._wv: - self._attr_color_mode = ColorMode.RGBW - self._attr_supported_color_modes = {ColorMode.RGBW} + if ( + coordinator.data.info.leds.segment_light_capabilities is not None + and ( + color_modes := LIGHT_CAPABILITIES_COLOR_MODE_MAPPING.get( + coordinator.data.info.leds.segment_light_capabilities[segment] + ) + ) + is not None + ): + self._attr_color_mode = color_modes[0] + self._attr_supported_color_modes = set(color_modes) @property def available(self) -> bool: """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -146,20 +161,29 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def rgb_color(self) -> tuple[int, int, int] | None: """Return the color value.""" - return self.coordinator.data.state.segments[self._segment].color_primary[:3] + if not (color := self.coordinator.data.state.segments[self._segment].color): + return None + return color.primary[:3] @property def rgbw_color(self) -> tuple[int, int, int, int] | None: """Return the color value.""" - return cast( - tuple[int, int, int, int], - self.coordinator.data.state.segments[self._segment].color_primary, - ) + if not (color := self.coordinator.data.state.segments[self._segment].color): + return None + return cast(tuple[int, int, int, int], color.primary) + + @property + def color_temp_kelvin(self) -> int | None: + """Return the CT color value in K.""" + cct = self.coordinator.data.state.segments[self._segment].cct + return kelvin_to_255_reverse(cct, COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX) @property def effect(self) -> str | None: """Return the current effect of the light.""" - return self.coordinator.data.state.segments[self._segment].effect.name + return self.coordinator.data.effects[ + int(self.coordinator.data.state.segments[self._segment].effect_id) + ].name @property def brightness(self) -> int | None: @@ -178,7 +202,7 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): @property def effect_list(self) -> list[str]: """Return 
the list of supported effects.""" - return [effect.name for effect in self.coordinator.data.effects] + return [effect.name for effect in self.coordinator.data.effects.values()] @property def is_on(self) -> bool: @@ -223,6 +247,11 @@ class WLEDSegmentLight(WLEDEntity, LightEntity): if ATTR_RGBW_COLOR in kwargs: data[ATTR_COLOR_PRIMARY] = kwargs[ATTR_RGBW_COLOR] + if ATTR_COLOR_TEMP_KELVIN in kwargs: + data[ATTR_CCT] = kelvin_to_255( + kwargs[ATTR_COLOR_TEMP_KELVIN], COLOR_TEMP_K_MIN, COLOR_TEMP_K_MAX + ) + if ATTR_TRANSITION in kwargs: # WLED uses 100ms per unit, so 10 = 1 second. data[ATTR_TRANSITION] = round(kwargs[ATTR_TRANSITION] * 10) @@ -258,7 +287,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {light.segment_id for light in coordinator.data.state.segments} + segment_ids = { + light.segment_id + for light in coordinator.data.state.segments.values() + if light.segment_id is not None + } new_entities: list[WLEDMainLight | WLEDSegmentLight] = [] # More than 1 segment now? No main? Add main controls diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index a01bbcabdd6..efeb414438d 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["wled==0.18.0"], + "requirements": ["wled==0.20.1"], "zeroconf": ["_wled._tcp.local."] } diff --git a/homeassistant/components/wled/number.py b/homeassistant/components/wled/number.py index 5af466360bb..225d783bfdb 100644 --- a/homeassistant/components/wled/number.py +++ b/homeassistant/components/wled/number.py @@ -44,7 +44,7 @@ async def async_setup_entry( class WLEDNumberEntityDescription(NumberEntityDescription): """Class describing WLED number entities.""" - value_fn: Callable[[Segment], float | None] + value_fn: Callable[[Segment], int | None] NUMBERS = [ @@ -64,7 +64,7 @@ NUMBERS = [ native_step=1, native_min_value=0, native_max_value=255, - value_fn=lambda segment: segment.intensity, + value_fn=lambda segment: int(segment.intensity), ), ] @@ -100,7 +100,7 @@ class WLEDNumber(WLEDEntity, NumberEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -133,7 +133,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDNumber] = [] diff --git a/homeassistant/components/wled/select.py b/homeassistant/components/wled/select.py index 20b14531ac7..a645b04573c 100644 --- a/homeassistant/components/wled/select.py +++ b/homeassistant/components/wled/select.py @@ -4,7 +4,7 @@ from __future__ import annotations from functools import partial -from wled import Live, Playlist, Preset +from wled import LiveDataOverride from homeassistant.components.select import SelectEntity from homeassistant.const import EntityCategory @@ -56,17 +56,17 @@ class WLEDLiveOverrideSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_live_override" - self._attr_options = [str(live.value) 
for live in Live] + self._attr_options = [str(live.value) for live in LiveDataOverride] @property def current_option(self) -> str: """Return the current selected live override.""" - return str(self.coordinator.data.state.lor.value) + return str(self.coordinator.data.state.live_data_override.value) @wled_exception_handler async def async_select_option(self, option: str) -> None: """Set WLED state to the selected live override state.""" - await self.coordinator.wled.live(live=Live(int(option))) + await self.coordinator.wled.live(live=LiveDataOverride(int(option))) class WLEDPresetSelect(WLEDEntity, SelectEntity): @@ -79,7 +79,9 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): super().__init__(coordinator=coordinator) self._attr_unique_id = f"{coordinator.data.info.mac_address}_preset" - self._attr_options = [preset.name for preset in self.coordinator.data.presets] + self._attr_options = [ + preset.name for preset in self.coordinator.data.presets.values() + ] @property def available(self) -> bool: @@ -89,9 +91,13 @@ class WLEDPresetSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected preset.""" - if not isinstance(self.coordinator.data.state.preset, Preset): + if not self.coordinator.data.state.preset_id: return None - return self.coordinator.data.state.preset.name + if preset := self.coordinator.data.presets.get( + self.coordinator.data.state.preset_id + ): + return preset.name + return None @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -110,7 +116,7 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_playlist" self._attr_options = [ - playlist.name for playlist in self.coordinator.data.playlists + playlist.name for playlist in self.coordinator.data.playlists.values() ] @property @@ -121,9 +127,13 @@ class WLEDPlaylistSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the currently selected playlist.""" - if not isinstance(self.coordinator.data.state.playlist, Playlist): + if not self.coordinator.data.state.playlist_id: return None - return self.coordinator.data.state.playlist.name + if playlist := self.coordinator.data.playlists.get( + self.coordinator.data.state.playlist_id + ): + return playlist.name + return None @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -150,7 +160,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): self._attr_unique_id = f"{coordinator.data.info.mac_address}_palette_{segment}" self._attr_options = [ - palette.name for palette in self.coordinator.data.palettes + palette.name for palette in self.coordinator.data.palettes.values() ] self._segment = segment @@ -159,7 +169,7 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -167,7 +177,9 @@ class WLEDPaletteSelect(WLEDEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the current selected color palette.""" - return self.coordinator.data.state.segments[self._segment].palette.name + return self.coordinator.data.palettes[ + int(self.coordinator.data.state.segments[self._segment].palette_id) + ].name @wled_exception_handler async def async_select_option(self, option: str) -> None: @@ -182,7 +194,11 @@ def async_update_segments( async_add_entities: 
AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDPaletteSelect] = [] diff --git a/homeassistant/components/wled/sensor.py b/homeassistant/components/wled/sensor.py index 7d18665a085..4f97c367612 100644 --- a/homeassistant/components/wled/sensor.py +++ b/homeassistant/components/wled/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import datetime from wled import Device as WLEDDevice @@ -71,7 +71,7 @@ SENSORS: tuple[WLEDSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.TIMESTAMP, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, - value_fn=lambda device: (utcnow() - timedelta(seconds=device.info.uptime)), + value_fn=lambda device: (utcnow() - device.info.uptime), ), WLEDSensorEntityDescription( key="free_heap", diff --git a/homeassistant/components/wled/strings.json b/homeassistant/components/wled/strings.json index 9581641f545..50dc0129369 100644 --- a/homeassistant/components/wled/strings.json +++ b/homeassistant/components/wled/strings.json @@ -21,8 +21,7 @@ }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "cct_unsupported": "This WLED device uses CCT channels, which is not supported by this integration" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" } }, "options": { diff --git a/homeassistant/components/wled/switch.py b/homeassistant/components/wled/switch.py index 7ec75b956c0..643834dcdec 100644 --- a/homeassistant/components/wled/switch.py +++ b/homeassistant/components/wled/switch.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import WLEDConfigEntry -from .const import ATTR_DURATION, ATTR_FADE, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT +from .const import ATTR_DURATION, ATTR_TARGET_BRIGHTNESS, ATTR_UDP_PORT from .coordinator import WLEDDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -62,7 +62,6 @@ class WLEDNightlightSwitch(WLEDEntity, SwitchEntity): state = self.coordinator.data.state return { ATTR_DURATION: state.nightlight.duration, - ATTR_FADE: state.nightlight.fade, ATTR_TARGET_BRIGHTNESS: state.nightlight.target_brightness, } @@ -171,7 +170,7 @@ class WLEDReverseSwitch(WLEDEntity, SwitchEntity): """Return True if entity is available.""" try: self.coordinator.data.state.segments[self._segment] - except IndexError: + except KeyError: return False return super().available @@ -199,7 +198,11 @@ def async_update_segments( async_add_entities: AddEntitiesCallback, ) -> None: """Update segments.""" - segment_ids = {segment.segment_id for segment in coordinator.data.state.segments} + segment_ids = { + segment.segment_id + for segment in coordinator.data.state.segments.values() + if segment.segment_id is not None + } new_entities: list[WLEDReverseSwitch] = [] diff --git a/homeassistant/components/wled/update.py b/homeassistant/components/wled/update.py index 05df5fcf54f..384b394ac50 100644 --- a/homeassistant/components/wled/update.py +++ b/homeassistant/components/wled/update.py @@ -12,8 +12,8 @@ from homeassistant.components.update import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import WLEDConfigEntry -from .coordinator import WLEDDataUpdateCoordinator +from . import WLED_KEY, WLEDConfigEntry +from .coordinator import WLEDDataUpdateCoordinator, WLEDReleasesDataUpdateCoordinator from .entity import WLEDEntity from .helpers import wled_exception_handler @@ -24,7 +24,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up WLED update based on a config entry.""" - async_add_entities([WLEDUpdateEntity(entry.runtime_data)]) + async_add_entities([WLEDUpdateEntity(entry.runtime_data, hass.data[WLED_KEY])]) class WLEDUpdateEntity(WLEDEntity, UpdateEntity): @@ -36,11 +36,33 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): ) _attr_title = "WLED" - def __init__(self, coordinator: WLEDDataUpdateCoordinator) -> None: + def __init__( + self, + coordinator: WLEDDataUpdateCoordinator, + releases_coordinator: WLEDReleasesDataUpdateCoordinator, + ) -> None: """Initialize the update entity.""" super().__init__(coordinator=coordinator) + self.releases_coordinator = releases_coordinator self._attr_unique_id = coordinator.data.info.mac_address + async def async_added_to_hass(self) -> None: + """When entity is added to hass. + + Register extra update listener for the releases coordinator. + """ + await super().async_added_to_hass() + self.async_on_remove( + self.releases_coordinator.async_add_listener( + self._handle_coordinator_update + ) + ) + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.releases_coordinator.last_update_success + @property def installed_version(self) -> str | None: """Version currently installed and in use.""" @@ -54,17 +76,17 @@ class WLEDUpdateEntity(WLEDEntity, UpdateEntity): # If we already run a pre-release, we consider being on the beta channel. 
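Many of the WLED hunks (light, number, select, switch) make the same mechanical change: the updated wled package exposes segments, effects, palettes, presets and playlists as id-keyed mappings rather than lists, so availability checks catch KeyError instead of IndexError and iteration goes through .values(). A plain-Python sketch of that data-shape change, with stand-in types rather than the real wled models:

from dataclasses import dataclass


@dataclass
class Segment:
    segment_id: int | None


segments: dict[int, Segment] = {0: Segment(0), 2: Segment(2)}

try:
    segments[1]  # a segment id that no longer exists
except KeyError:  # a list-based model raised IndexError here instead
    pass

# Entity setup now collects ids from the mapping's values.
segment_ids = {
    seg.segment_id for seg in segments.values() if seg.segment_id is not None
}
print(segment_ids)  # {0, 2}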
# Offer beta version upgrade, unless stable is newer if ( - (beta := self.coordinator.data.info.version_latest_beta) is not None + (beta := self.releases_coordinator.data.beta) is not None and (current := self.coordinator.data.info.version) is not None and (current.alpha or current.beta or current.release_candidate) and ( - (stable := self.coordinator.data.info.version_latest_stable) is None - or (stable is not None and stable < beta) + (stable := self.releases_coordinator.data.stable) is None + or (stable is not None and stable < beta and current > stable) ) ): return str(beta) - if (stable := self.coordinator.data.info.version_latest_stable) is not None: + if (stable := self.releases_coordinator.data.stable) is not None: return str(stable) return None diff --git a/homeassistant/components/workday/binary_sensor.py b/homeassistant/components/workday/binary_sensor.py index 5df8e6c3d75..4635b2209a6 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -6,6 +6,7 @@ from datetime import date, datetime, timedelta from typing import Final from holidays import ( + PUBLIC, HolidayBase, __version__ as python_holidays_version, country_holidays, @@ -35,6 +36,7 @@ from homeassistant.util import dt as dt_util, slugify from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -69,17 +71,28 @@ def validate_dates(holiday_list: list[str]) -> list[str]: def _get_obj_holidays( - country: str | None, province: str | None, year: int, language: str | None + country: str | None, + province: str | None, + year: int, + language: str | None, + categories: list[str] | None, ) -> HolidayBase: """Get the object for the requested country and year.""" if not country: return HolidayBase() + set_categories = None + if categories: + category_list = [PUBLIC] + category_list.extend(categories) + set_categories = tuple(category_list) + obj_holidays: HolidayBase = country_holidays( country, subdiv=province, years=year, language=language, + categories=set_categories, # type: ignore[arg-type] ) if (supported_languages := obj_holidays.supported_languages) and language == "en": for lang in supported_languages: @@ -89,6 +102,7 @@ def _get_obj_holidays( subdiv=province, years=year, language=lang, + categories=set_categories, # type: ignore[arg-type] ) LOGGER.debug("Changing language from %s to %s", language, lang) return obj_holidays @@ -107,10 +121,11 @@ async def async_setup_entry( sensor_name: str = entry.options[CONF_NAME] workdays: list[str] = entry.options[CONF_WORKDAYS] language: str | None = entry.options.get(CONF_LANGUAGE) + categories: list[str] | None = entry.options.get(CONF_CATEGORY) year: int = (dt_util.now() + timedelta(days=days_offset)).year obj_holidays: HolidayBase = await hass.async_add_executor_job( - _get_obj_holidays, country, province, year, language + _get_obj_holidays, country, province, year, language, categories ) calc_add_holidays: list[str] = validate_dates(add_holidays) calc_remove_holidays: list[str] = validate_dates(remove_holidays) diff --git a/homeassistant/components/workday/config_flow.py b/homeassistant/components/workday/config_flow.py index a66a9c51588..ebbc8fb0b99 100644 --- a/homeassistant/components/workday/config_flow.py +++ b/homeassistant/components/workday/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from functools import partial from typing import Any -from holidays import HolidayBase, country_holidays, list_supported_countries +from 
holidays import PUBLIC, HolidayBase, country_holidays, list_supported_countries import voluptuous as vol from homeassistant.config_entries import ( @@ -36,6 +36,7 @@ from homeassistant.util import dt as dt_util from .const import ( ALLOWED_DAYS, CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_PROVINCE, @@ -86,7 +87,29 @@ def add_province_and_language_to_schema( ), } - return vol.Schema({**DATA_SCHEMA_OPT.schema, **language_schema, **province_schema}) + category_schema = {} + # PUBLIC will always be included and can therefore not be set/removed + _categories = [x for x in _country.supported_categories if x != PUBLIC] + if _categories: + category_schema = { + vol.Optional(CONF_CATEGORY): SelectSelector( + SelectSelectorConfig( + options=_categories, + mode=SelectSelectorMode.DROPDOWN, + multiple=True, + translation_key=CONF_CATEGORY, + ) + ), + } + + return vol.Schema( + { + **DATA_SCHEMA_OPT.schema, + **language_schema, + **province_schema, + **category_schema, + } + ) def _is_valid_date_range(check_date: str, error: type[HomeAssistantError]) -> bool: @@ -256,6 +279,8 @@ class WorkdayConfigFlow(ConfigFlow, domain=DOMAIN): CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], CONF_PROVINCE: combined_input.get(CONF_PROVINCE), } + if CONF_CATEGORY in combined_input: + abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] LOGGER.debug("abort_check in options with %s", combined_input) self._async_abort_entries_match(abort_match) @@ -314,18 +339,19 @@ class WorkdayOptionsFlowHandler(OptionsFlowWithConfigEntry): errors["remove_holidays"] = "remove_holiday_range_error" else: LOGGER.debug("abort_check in options with %s", combined_input) + abort_match = { + CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), + CONF_EXCLUDES: combined_input[CONF_EXCLUDES], + CONF_OFFSET: combined_input[CONF_OFFSET], + CONF_WORKDAYS: combined_input[CONF_WORKDAYS], + CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], + CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], + CONF_PROVINCE: combined_input.get(CONF_PROVINCE), + } + if CONF_CATEGORY in combined_input: + abort_match[CONF_CATEGORY] = combined_input[CONF_CATEGORY] try: - self._async_abort_entries_match( - { - CONF_COUNTRY: self._config_entry.options.get(CONF_COUNTRY), - CONF_EXCLUDES: combined_input[CONF_EXCLUDES], - CONF_OFFSET: combined_input[CONF_OFFSET], - CONF_WORKDAYS: combined_input[CONF_WORKDAYS], - CONF_ADD_HOLIDAYS: combined_input[CONF_ADD_HOLIDAYS], - CONF_REMOVE_HOLIDAYS: combined_input[CONF_REMOVE_HOLIDAYS], - CONF_PROVINCE: combined_input.get(CONF_PROVINCE), - } - ) + self._async_abort_entries_match(abort_match) except AbortFlow as err: errors = {"base": err.reason} else: diff --git a/homeassistant/components/workday/const.py b/homeassistant/components/workday/const.py index 6a46f1e824b..76580ae642f 100644 --- a/homeassistant/components/workday/const.py +++ b/homeassistant/components/workday/const.py @@ -19,6 +19,7 @@ CONF_EXCLUDES = "excludes" CONF_OFFSET = "days_offset" CONF_ADD_HOLIDAYS = "add_holidays" CONF_REMOVE_HOLIDAYS = "remove_holidays" +CONF_CATEGORY = "category" # By default, Monday - Friday are workdays DEFAULT_WORKDAYS = ["mon", "tue", "wed", "thu", "fri"] diff --git a/homeassistant/components/workday/strings.json b/homeassistant/components/workday/strings.json index 0e618beaf82..f3b966e28ea 100644 --- a/homeassistant/components/workday/strings.json +++ b/homeassistant/components/workday/strings.json @@ -20,7 +20,8 @@ "add_holidays": "Add holidays", "remove_holidays": "Remove 
Holidays", "province": "Subdivision of country", - "language": "Language for named holidays" + "language": "Language for named holidays", + "category": "Additional category as holiday" }, "data_description": { "excludes": "List of workdays to exclude, notice the keyword `holiday` and read the documentation on how to use it correctly", @@ -29,7 +30,8 @@ "add_holidays": "Add custom holidays as YYYY-MM-DD or as range using `,` as separator", "remove_holidays": "Remove holidays as YYYY-MM-DD, as range using `,` as separator or by using partial of name", "province": "State, territory, province or region of country", - "language": "Language to use when configuring named holiday exclusions" + "language": "Language to use when configuring named holiday exclusions", + "category": "Select additional categories to include as holidays" } } }, @@ -51,7 +53,8 @@ "add_holidays": "[%key:component::workday::config::step::options::data::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data::province%]", - "language": "[%key:component::workday::config::step::options::data::language%]" + "language": "[%key:component::workday::config::step::options::data::language%]", + "category": "[%key:component::workday::config::step::options::data::category%]" }, "data_description": { "excludes": "[%key:component::workday::config::step::options::data_description::excludes%]", @@ -60,7 +63,8 @@ "add_holidays": "[%key:component::workday::config::step::options::data_description::add_holidays%]", "remove_holidays": "[%key:component::workday::config::step::options::data_description::remove_holidays%]", "province": "[%key:component::workday::config::step::options::data_description::province%]", - "language": "[%key:component::workday::config::step::options::data_description::language%]" + "language": "[%key:component::workday::config::step::options::data_description::language%]", + "category": "[%key:component::workday::config::step::options::data_description::category%]" } } }, @@ -78,6 +82,24 @@ "none": "No subdivision" } }, + "category": { + "options": { + "armed_forces": "Armed forces", + "bank": "Bank", + "government": "Government", + "half_day": "Half day", + "optional": "Optional", + "public": "Public", + "school": "School", + "unofficial": "Unofficial", + "workday": "Workday", + "chinese": "Chinese", + "christian": "Christian", + "hebrew": "Hebrew", + "hindu": "Hindu", + "islamic": "Islamic" + } + }, "days": { "options": { "mon": "[%key:common::time::monday%]", diff --git a/homeassistant/components/worldclock/__init__.py b/homeassistant/components/worldclock/__init__.py index 978eaac8968..ad01c45917a 100644 --- a/homeassistant/components/worldclock/__init__.py +++ b/homeassistant/components/worldclock/__init__.py @@ -1 +1,25 @@ """The worldclock component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from .const import PLATFORMS + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Worldclock from a config entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload World clock config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def 
update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/worldclock/config_flow.py b/homeassistant/components/worldclock/config_flow.py new file mode 100644 index 00000000000..a9598c049aa --- /dev/null +++ b/homeassistant/components/worldclock/config_flow.py @@ -0,0 +1,107 @@ +"""Config flow for World clock.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast +import zoneinfo + +import voluptuous as vol + +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + TextSelector, +) + +from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN + +TIME_STR_OPTIONS = [ + SelectOptionDict( + value=DEFAULT_TIME_STR_FORMAT, label=f"14:05 ({DEFAULT_TIME_STR_FORMAT})" + ), + SelectOptionDict(value="%I:%M %p", label="11:05 am (%I:%M %p)"), + SelectOptionDict(value="%Y-%m-%d %H:%M", label="2024-01-01 14:05 (%Y-%m-%d %H:%M)"), + SelectOptionDict( + value="%a, %b %d, %Y %I:%M %p", + label="Monday, Jan 01, 2024 11:05 am (%a, %b %d, %Y %I:%M %p)", + ), +] + + +async def validate_duplicate( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate already existing entry.""" + handler.parent_handler._async_abort_entries_match({**handler.options, **user_input}) # noqa: SLF001 + + return user_input + + +async def get_schema(handler: SchemaCommonFlowHandler) -> vol.Schema: + """Get available timezones.""" + get_timezones: list[str] = list( + await handler.parent_handler.hass.async_add_executor_job( + zoneinfo.available_timezones + ) + ) + return vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + vol.Required(CONF_TIME_ZONE): SelectSelector( + SelectSelectorConfig( + options=get_timezones, mode=SelectSelectorMode.DROPDOWN, sort=True + ) + ), + } + ).extend(DATA_SCHEMA_OPTIONS.schema) + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Optional(CONF_TIME_FORMAT, default=DEFAULT_TIME_STR_FORMAT): SelectSelector( + SelectSelectorConfig( + options=TIME_STR_OPTIONS, + custom_value=True, + mode=SelectSelectorMode.DROPDOWN, + ) + ) + } +) + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=get_schema, + validate_user_input=validate_duplicate, + ), + "import": SchemaFlowFormStep( + schema=get_schema, + validate_user_input=validate_duplicate, + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_duplicate, + ) +} + + +class WorldclockConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Worldclock.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) diff --git a/homeassistant/components/worldclock/const.py b/homeassistant/components/worldclock/const.py new file mode 100644 index 00000000000..fafa3dbc52f --- /dev/null +++ b/homeassistant/components/worldclock/const.py @@ -0,0 +1,11 @@ +"""Constants for world clock component.""" + +from homeassistant.const import Platform + +DOMAIN = "worldclock" +PLATFORMS = 
[Platform.SENSOR] + +CONF_TIME_FORMAT = "time_format" + +DEFAULT_NAME = "Worldclock Sensor" +DEFAULT_TIME_STR_FORMAT = "%H:%M" diff --git a/homeassistant/components/worldclock/manifest.json b/homeassistant/components/worldclock/manifest.json index 61600e4f924..bc7ee3cd939 100644 --- a/homeassistant/components/worldclock/manifest.json +++ b/homeassistant/components/worldclock/manifest.json @@ -2,6 +2,7 @@ "domain": "worldclock", "name": "Worldclock", "codeowners": ["@fabaff"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/worldclock", "iot_class": "local_push", "quality_scale": "internal" diff --git a/homeassistant/components/worldclock/sensor.py b/homeassistant/components/worldclock/sensor.py index d9b4aa90f07..f4879ca08c4 100644 --- a/homeassistant/components/worldclock/sensor.py +++ b/homeassistant/components/worldclock/sensor.py @@ -10,17 +10,17 @@ from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorEntity, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_NAME, CONF_TIME_ZONE -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -CONF_TIME_FORMAT = "time_format" - -DEFAULT_NAME = "Worldclock Sensor" -DEFAULT_TIME_STR_FORMAT = "%H:%M" +from .const import CONF_TIME_FORMAT, DEFAULT_NAME, DEFAULT_TIME_STR_FORMAT, DOMAIN PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { @@ -38,13 +38,44 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the World clock sensor.""" - time_zone = dt_util.get_time_zone(config[CONF_TIME_ZONE]) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=config, + ) + ) + + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.2.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "Worldclock", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the World clock sensor entry.""" + time_zone = await dt_util.async_get_time_zone(entry.options[CONF_TIME_ZONE]) async_add_entities( [ WorldClockSensor( time_zone, - config[CONF_NAME], - config[CONF_TIME_FORMAT], + entry.options[CONF_NAME], + entry.options[CONF_TIME_FORMAT], + entry.entry_id, ) ], True, @@ -55,12 +86,22 @@ class WorldClockSensor(SensorEntity): """Representation of a World clock sensor.""" _attr_icon = "mdi:clock" + _attr_has_entity_name = True + _attr_name = None - def __init__(self, time_zone: tzinfo | None, name: str, time_format: str) -> None: + def __init__( + self, time_zone: tzinfo | None, name: str, time_format: str, unique_id: str + ) -> None: """Initialize the sensor.""" - self._attr_name = name self._time_zone = time_zone self._time_format = time_format + self._attr_unique_id = unique_id + 
self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + name=name, + entry_type=DeviceEntryType.SERVICE, + manufacturer="Worldclock", + ) async def async_update(self) -> None: """Get the time and updates the states.""" diff --git a/homeassistant/components/worldclock/strings.json b/homeassistant/components/worldclock/strings.json new file mode 100644 index 00000000000..2f6b8d67a7c --- /dev/null +++ b/homeassistant/components/worldclock/strings.json @@ -0,0 +1,35 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "step": { + "user": { + "data": { + "name": "[%key:common::config_flow::data::name%]", + "time_zone": "Timezone", + "time_format": "Time format" + }, + "data_description": { + "time_zone": "Select timezone from list", + "time_format": "Select a pre-defined format from the list or define your own format." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + }, + "step": { + "init": { + "data": { + "time_format": "[%key:component::worldclock::config::step::user::data::time_format%]" + }, + "data_description": { + "time_format": "[%key:component::worldclock::config::step::user::data_description::time_format%]" + } + } + } + } +} diff --git a/homeassistant/components/wsdot/sensor.py b/homeassistant/components/wsdot/sensor.py index 3aae6746ea9..73714b75c95 100644 --- a/homeassistant/components/wsdot/sensor.py +++ b/homeassistant/components/wsdot/sensor.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta, timezone from http import HTTPStatus import logging import re +from typing import Any import requests import voluptuous as vol @@ -125,7 +126,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._state = self._data.get(ATTR_CURRENT_TIME) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" if self._data is not None: attrs = {} @@ -140,6 +141,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._data.get(ATTR_TIME_UPDATED) ) return attrs + return None def _parse_wsdot_timestamp(timestamp): diff --git a/homeassistant/components/wyoming/devices.py b/homeassistant/components/wyoming/devices.py index 2ca66f3b21a..2e00b31fd34 100644 --- a/homeassistant/components/wyoming/devices.py +++ b/homeassistant/components/wyoming/devices.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -23,6 +24,7 @@ class SatelliteDevice: noise_suppression_level: int = 0 auto_gain: int = 0 volume_multiplier: float = 1.0 + vad_sensitivity: VadSensitivity = VadSensitivity.DEFAULT _is_active_listener: Callable[[], None] | None = None _is_muted_listener: Callable[[], None] | None = None @@ -77,6 +79,14 @@ class SatelliteDevice: if self._audio_settings_listener is not None: self._audio_settings_listener() + @callback + def set_vad_sensitivity(self, vad_sensitivity: VadSensitivity) -> None: + """Set VAD sensitivity.""" + if vad_sensitivity != self.vad_sensitivity: + self.vad_sensitivity = vad_sensitivity + if self._audio_settings_listener is not None: + self._audio_settings_listener() + @callback def 
set_is_active_listener(self, is_active_listener: Callable[[], None]) -> None: """Listen for updates to is_active.""" @@ -140,3 +150,10 @@ class SatelliteDevice: return ent_reg.async_get_entity_id( "number", DOMAIN, f"{self.satellite_id}-volume_multiplier" ) + + def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for VAD sensitivity.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id( + "select", DOMAIN, f"{self.satellite_id}-vad_sensitivity" + ) diff --git a/homeassistant/components/wyoming/satellite.py b/homeassistant/components/wyoming/satellite.py index 5af0c54abad..781f0706c68 100644 --- a/homeassistant/components/wyoming/satellite.py +++ b/homeassistant/components/wyoming/satellite.py @@ -1,6 +1,7 @@ """Support for Wyoming satellite services.""" import asyncio +from collections.abc import AsyncGenerator import io import logging import time @@ -8,7 +9,6 @@ from typing import Final from uuid import uuid4 import wave -from typing_extensions import AsyncGenerator from wyoming.asr import Transcribe, Transcript from wyoming.audio import AudioChunk, AudioChunkConverter, AudioStart, AudioStop from wyoming.client import AsyncTcpClient @@ -25,6 +25,7 @@ from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, intent, stt, tts from homeassistant.components.assist_pipeline import select as pipeline_select +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.config_entries import ConfigEntry from homeassistant.core import Context, HomeAssistant, callback @@ -409,6 +410,9 @@ class WyomingSatellite: noise_suppression_level=self.device.noise_suppression_level, auto_gain_dbfs=self.device.auto_gain, volume_multiplier=self.device.volume_multiplier, + silence_seconds=VadSensitivity.to_seconds( + self.device.vad_sensitivity + ), ), device_id=self.device.device_id, wake_word_phrase=wake_word_phrase, diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index 99f26c3e440..f852b4d0434 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -4,7 +4,11 @@ from __future__ import annotations from typing import TYPE_CHECKING, Final -from homeassistant.components.assist_pipeline.select import AssistPipelineSelect +from homeassistant.components.assist_pipeline.select import ( + AssistPipelineSelect, + VadSensitivitySelect, +) +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory @@ -45,6 +49,7 @@ async def async_setup_entry( [ WyomingSatellitePipelineSelect(hass, device), WyomingSatelliteNoiseSuppressionLevelSelect(device), + WyomingSatelliteVadSensitivitySelect(hass, device), ] ) @@ -92,3 +97,21 @@ class WyomingSatelliteNoiseSuppressionLevelSelect( self._attr_current_option = option self.async_write_ha_state() self._device.set_noise_suppression_level(_NOISE_SUPPRESSION_LEVEL[option]) + + +class WyomingSatelliteVadSensitivitySelect( + WyomingSatelliteEntity, VadSensitivitySelect +): + """VAD sensitivity selector for Wyoming satellites.""" + + def __init__(self, hass: HomeAssistant, device: SatelliteDevice) -> None: + """Initialize a VAD sensitivity selector.""" + self.device = device + + WyomingSatelliteEntity.__init__(self, device) + VadSensitivitySelect.__init__(self, hass, 
device.satellite_id) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + await super().async_select_option(option) + self.device.set_vad_sensitivity(VadSensitivity(option)) diff --git a/homeassistant/components/wyoming/strings.json b/homeassistant/components/wyoming/strings.json index f2768e45eb8..4a1a4c3a246 100644 --- a/homeassistant/components/wyoming/strings.json +++ b/homeassistant/components/wyoming/strings.json @@ -46,6 +46,14 @@ "high": "High", "max": "Max" } + }, + "vad_sensitivity": { + "name": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::name%]", + "state": { + "default": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::default%]", + "aggressive": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", + "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" + } } }, "switch": { diff --git a/homeassistant/components/wyoming/wake_word.py b/homeassistant/components/wyoming/wake_word.py index 6eba0f7ca6d..64dfd60c068 100644 --- a/homeassistant/components/wyoming/wake_word.py +++ b/homeassistant/components/wyoming/wake_word.py @@ -89,6 +89,7 @@ class WyomingWakeWordProvider(wake_word.WakeWordDetectionEntity): """Get the next chunk from audio stream.""" async for chunk_bytes in stream: return chunk_bytes + return None try: async with AsyncTcpClient(self.service.host, self.service.port) as client: diff --git a/homeassistant/components/xiaomi/device_tracker.py b/homeassistant/components/xiaomi/device_tracker.py index b3983e76aaa..b14ec073938 100644 --- a/homeassistant/components/xiaomi/device_tracker.py +++ b/homeassistant/components/xiaomi/device_tracker.py @@ -172,7 +172,6 @@ def _get_token(host, username, password): ) _LOGGER.exception(error_message, url, data, result) return None - else: - _LOGGER.error( - "Invalid response: [%s] at url: [%s] with data [%s]", res, url, data - ) + + _LOGGER.error("Invalid response: [%s] at url: [%s] with data [%s]", res, url, data) + return None diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index cee2980fe07..75208b142dd 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -202,6 +202,8 @@ class XiaomiNatgasSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiMotionSensor(XiaomiBinarySensor): """Representation of a XiaomiMotionSensor.""" @@ -298,6 +300,8 @@ class XiaomiMotionSensor(XiaomiBinarySensor): self._state = True return True + return False + class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): """Representation of a XiaomiDoorSensor.""" @@ -357,6 +361,8 @@ class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): return True return False + return False + class XiaomiWaterLeakSensor(XiaomiBinarySensor): """Representation of a XiaomiWaterLeakSensor.""" @@ -401,6 +407,8 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiSmokeSensor(XiaomiBinarySensor): """Representation of a XiaomiSmokeSensor.""" @@ -443,6 +451,8 @@ class XiaomiSmokeSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiVibration(XiaomiBinarySensor): """Representation of a Xiaomi Vibration Sensor.""" diff --git a/homeassistant/components/xiaomi_ble/__init__.py b/homeassistant/components/xiaomi_ble/__init__.py index 4a9753bfe85..fae5e4d0c91 100644 
--- a/homeassistant/components/xiaomi_ble/__init__.py +++ b/homeassistant/components/xiaomi_ble/__init__.py @@ -2,12 +2,12 @@ from __future__ import annotations +from functools import partial import logging from typing import cast from xiaomi_ble import EncryptionScheme, SensorUpdate, XiaomiBluetoothDeviceData -from homeassistant import config_entries from homeassistant.components.bluetooth import ( DOMAIN as BLUETOOTH_DOMAIN, BluetoothScanningMode, @@ -29,6 +29,7 @@ from .const import ( XiaomiBleEvent, ) from .coordinator import XiaomiActiveBluetoothProcessorCoordinator +from .types import XiaomiBLEConfigEntry PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.EVENT, Platform.SENSOR] @@ -37,16 +38,14 @@ _LOGGER = logging.getLogger(__name__) def process_service_info( hass: HomeAssistant, - entry: config_entries.ConfigEntry, - data: XiaomiBluetoothDeviceData, - service_info: BluetoothServiceInfoBleak, + entry: XiaomiBLEConfigEntry, device_registry: DeviceRegistry, + service_info: BluetoothServiceInfoBleak, ) -> SensorUpdate: """Process a BluetoothServiceInfoBleak, running side effects and returning sensor data.""" + coordinator = entry.runtime_data + data = coordinator.device_data update = data.update(service_info) - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] discovered_event_classes = coordinator.discovered_event_classes if entry.data.get(CONF_SLEEPY_DEVICE, False) != data.sleepy_device: hass.config_entries.async_update_entry( @@ -165,38 +164,29 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await data.async_poll(connectable_device) device_registry = dr.async_get(hass) - coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = ( - XiaomiActiveBluetoothProcessorCoordinator( - hass, - _LOGGER, - address=address, - mode=BluetoothScanningMode.PASSIVE, - update_method=lambda service_info: process_service_info( - hass, entry, data, service_info, device_registry - ), - needs_poll_method=_needs_poll, - device_data=data, - discovered_event_classes=set( - entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) - ), - poll_method=_async_poll, - # We will take advertisements from non-connectable devices - # since we will trade the BLEDevice for a connectable one - # if we need to poll it - connectable=False, - entry=entry, - ) + coordinator = XiaomiActiveBluetoothProcessorCoordinator( + hass, + _LOGGER, + address=address, + mode=BluetoothScanningMode.PASSIVE, + update_method=partial(process_service_info, hass, entry, device_registry), + needs_poll_method=_needs_poll, + device_data=data, + discovered_event_classes=set(entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, [])), + poll_method=_async_poll, + # We will take advertisements from non-connectable devices + # since we will trade the BLEDevice for a connectable one + # if we need to poll it + connectable=False, + entry=entry, ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - entry.async_on_unload( - coordinator.async_start() - ) # only start after all platforms have had a chance to subscribe + # only start after all platforms have had a chance to subscribe + entry.async_on_unload(coordinator.async_start()) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: XiaomiBLEConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, 
PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/xiaomi_ble/binary_sensor.py b/homeassistant/components/xiaomi_ble/binary_sensor.py index 8734f45c405..5336c4d8f7f 100644 --- a/homeassistant/components/xiaomi_ble/binary_sensor.py +++ b/homeassistant/components/xiaomi_ble/binary_sensor.py @@ -8,7 +8,6 @@ from xiaomi_ble.parser import ( SensorUpdate, ) -from homeassistant import config_entries from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, @@ -22,12 +21,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN -from .coordinator import ( - XiaomiActiveBluetoothProcessorCoordinator, - XiaomiPassiveBluetoothDataProcessor, -) +from .coordinator import XiaomiPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +from .types import XiaomiBLEConfigEntry BINARY_SENSOR_DESCRIPTIONS = { XiaomiBinarySensorDeviceClass.BATTERY: BinarySensorEntityDescription( @@ -134,13 +130,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/coordinator.py b/homeassistant/components/xiaomi_ble/coordinator.py index 1cd49e851ea..69fc427013a 100644 --- a/homeassistant/components/xiaomi_ble/coordinator.py +++ b/homeassistant/components/xiaomi_ble/coordinator.py @@ -16,11 +16,11 @@ from homeassistant.components.bluetooth.active_update_processor import ( from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataProcessor, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.debounce import Debouncer from .const import CONF_SLEEPY_DEVICE +from .types import XiaomiBLEConfigEntry class XiaomiActiveBluetoothProcessorCoordinator( @@ -45,7 +45,7 @@ class XiaomiActiveBluetoothProcessorCoordinator( ] | None = None, poll_debouncer: Debouncer[Coroutine[Any, Any, None]] | None = None, - entry: ConfigEntry, + entry: XiaomiBLEConfigEntry, connectable: bool = True, ) -> None: """Initialize the Xiaomi Bluetooth Active Update Processor Coordinator.""" diff --git a/homeassistant/components/xiaomi_ble/event.py b/homeassistant/components/xiaomi_ble/event.py index e39a4adb3c7..7265bcd112c 100644 --- a/homeassistant/components/xiaomi_ble/event.py +++ b/homeassistant/components/xiaomi_ble/event.py @@ -9,7 +9,6 @@ from homeassistant.components.event import ( EventEntity, EventEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -29,7 +28,7 @@ from .const import ( EVENT_TYPE, XiaomiBleEvent, ) -from .coordinator import XiaomiActiveBluetoothProcessorCoordinator +from 
.types import XiaomiBLEConfigEntry DESCRIPTIONS_BY_EVENT_CLASS = { EVENT_CLASS_BUTTON: EventEntityDescription( @@ -183,13 +182,11 @@ class XiaomiEventEntity(EventEntity): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Xiaomi event.""" - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data address = coordinator.address ent_reg = er.async_get(hass) async_add_entities( diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index 65b33c3c559..3108c285dbe 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -7,7 +7,6 @@ from typing import cast from xiaomi_ble import DeviceClass, SensorUpdate, Units from xiaomi_ble.parser import ExtendedSensorDeviceClass -from homeassistant import config_entries from homeassistant.components.bluetooth.passive_update_processor import ( PassiveBluetoothDataUpdate, PassiveBluetoothProcessorEntity, @@ -35,12 +34,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.sensor import sensor_device_info_to_hass_device_info -from .const import DOMAIN -from .coordinator import ( - XiaomiActiveBluetoothProcessorCoordinator, - XiaomiPassiveBluetoothDataProcessor, -) +from .coordinator import XiaomiPassiveBluetoothDataProcessor from .device import device_key_to_bluetooth_entity_key +from .types import XiaomiBLEConfigEntry SENSOR_DESCRIPTIONS = { (DeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription( @@ -193,13 +189,11 @@ def sensor_update_to_bluetooth_data_update( async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: XiaomiBLEConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Xiaomi BLE sensors.""" - coordinator: XiaomiActiveBluetoothProcessorCoordinator = hass.data[DOMAIN][ - entry.entry_id - ] + coordinator = entry.runtime_data processor = XiaomiPassiveBluetoothDataProcessor( sensor_update_to_bluetooth_data_update ) diff --git a/homeassistant/components/xiaomi_ble/types.py b/homeassistant/components/xiaomi_ble/types.py new file mode 100644 index 00000000000..f0de8af9d06 --- /dev/null +++ b/homeassistant/components/xiaomi_ble/types.py @@ -0,0 +1,10 @@ +"""Support for xiaomi ble.""" + +from typing import TYPE_CHECKING + +from homeassistant.config_entries import ConfigEntry + +if TYPE_CHECKING: + from .coordinator import XiaomiActiveBluetoothProcessorCoordinator + +type XiaomiBLEConfigEntry = ConfigEntry[XiaomiActiveBluetoothProcessorCoordinator] diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index 7729ce27d29..6d1a81007dc 100644 --- a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -190,7 +190,8 @@ async def async_setup_entry( elif model in MODELS_HUMIDIFIER_MJJSQ: sensors = HUMIDIFIER_MJJSQ_BINARY_SENSORS elif model in MODELS_VACUUM: - return _setup_vacuum_sensors(hass, config_entry, async_add_entities) + _setup_vacuum_sensors(hass, config_entry, async_add_entities) + return for description in BINARY_SENSOR_TYPES: if description.key not in sensors: diff --git a/homeassistant/components/xiaomi_miio/const.py b/homeassistant/components/xiaomi_miio/const.py index 
24b494f3d08..a8b1f8d4ba5 100644 --- a/homeassistant/components/xiaomi_miio/const.py +++ b/homeassistant/components/xiaomi_miio/const.py @@ -61,6 +61,7 @@ MODEL_AIRPURIFIER_2S = "zhimi.airpurifier.mc1" MODEL_AIRPURIFIER_3 = "zhimi.airpurifier.ma4" MODEL_AIRPURIFIER_3C = "zhimi.airpurifier.mb4" MODEL_AIRPURIFIER_3H = "zhimi.airpurifier.mb3" +MODEL_AIRPURIFIER_COMPACT = "xiaomi.airp.cpa4" MODEL_AIRPURIFIER_M1 = "zhimi.airpurifier.m1" MODEL_AIRPURIFIER_M2 = "zhimi.airpurifier.m2" MODEL_AIRPURIFIER_MA1 = "zhimi.airpurifier.ma1" @@ -83,6 +84,7 @@ MODEL_AIRHUMIDIFIER_CA4 = "zhimi.humidifier.ca4" MODEL_AIRHUMIDIFIER_CB1 = "zhimi.humidifier.cb1" MODEL_AIRHUMIDIFIER_JSQ = "deerma.humidifier.jsq" MODEL_AIRHUMIDIFIER_JSQ1 = "deerma.humidifier.jsq1" +MODEL_AIRHUMIDIFIER_JSQ2W = "deerma.humidifier.jsq2w" MODEL_AIRHUMIDIFIER_MJJSQ = "deerma.humidifier.mjjsq" MODEL_AIRFRESH_A1 = "dmaker.airfresh.a1" @@ -148,6 +150,7 @@ MODELS_PURIFIER_MIIO = [ MODEL_AIRPURIFIER_SA2, MODEL_AIRPURIFIER_2S, MODEL_AIRPURIFIER_2H, + MODEL_AIRPURIFIER_COMPACT, MODEL_AIRFRESH_A1, MODEL_AIRFRESH_VA2, MODEL_AIRFRESH_VA4, @@ -162,6 +165,7 @@ MODELS_HUMIDIFIER_MIOT = [MODEL_AIRHUMIDIFIER_CA4] MODELS_HUMIDIFIER_MJJSQ = [ MODEL_AIRHUMIDIFIER_JSQ, MODEL_AIRHUMIDIFIER_JSQ1, + MODEL_AIRHUMIDIFIER_JSQ2W, MODEL_AIRHUMIDIFIER_MJJSQ, ] diff --git a/homeassistant/components/xiaomi_miio/device_tracker.py b/homeassistant/components/xiaomi_miio/device_tracker.py index 9acdb1cc53e..4a7e447b8a5 100644 --- a/homeassistant/components/xiaomi_miio/device_tracker.py +++ b/homeassistant/components/xiaomi_miio/device_tracker.py @@ -71,7 +71,7 @@ class XiaomiMiioDeviceScanner(DeviceScanner): return [device["mac"] for device in station_info.associated_stations] - async def async_get_device_name(self, device): + async def async_get_device_name(self, device: str) -> str | None: """Return None. The repeater doesn't provide the name of the associated device. 
diff --git a/homeassistant/components/xiaomi_miio/fan.py b/homeassistant/components/xiaomi_miio/fan.py index 4e0e271b071..f075ff8816f 100644 --- a/homeassistant/components/xiaomi_miio/fan.py +++ b/homeassistant/components/xiaomi_miio/fan.py @@ -98,8 +98,8 @@ _LOGGER = logging.getLogger(__name__) DATA_KEY = "fan.xiaomi_miio" -ATTR_MODE_NATURE = "Nature" -ATTR_MODE_NORMAL = "Normal" +ATTR_MODE_NATURE = "nature" +ATTR_MODE_NORMAL = "normal" # Air Purifier ATTR_BRIGHTNESS = "brightness" @@ -294,6 +294,7 @@ class XiaomiGenericDevice(XiaomiCoordinatedMiioEntity, FanEntity): """Representation of a generic Xiaomi device.""" _attr_name = None + _enable_turn_on_off_backwards_compatibility = False def __init__(self, device, entry, unique_id, coordinator): """Initialize the generic Xiaomi device.""" @@ -479,6 +480,9 @@ class XiaomiAirPurifier(XiaomiGenericAirPurifier): self._preset_modes = PRESET_MODES_AIRPURIFIER self._attr_supported_features = FanEntityFeature.PRESET_MODE self._speed_count = 1 + self._attr_supported_features |= ( + FanEntityFeature.TURN_OFF | FanEntityFeature.TURN_ON + ) self._state = self.coordinator.data.is_on self._state_attrs.update( @@ -609,7 +613,11 @@ class XiaomiAirPurifierMB4(XiaomiGenericAirPurifier): self._device_features = FEATURE_FLAGS_AIRPURIFIER_3C self._preset_modes = PRESET_MODES_AIRPURIFIER_3C - self._attr_supported_features = FanEntityFeature.PRESET_MODE + self._attr_supported_features = ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) self._state = self.coordinator.data.is_on self._mode = self.coordinator.data.mode.value @@ -663,7 +671,10 @@ class XiaomiAirFresh(XiaomiGenericAirPurifier): self._speed_count = 4 self._preset_modes = PRESET_MODES_AIRFRESH self._attr_supported_features = ( - FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on @@ -756,7 +767,10 @@ class XiaomiAirFreshA1(XiaomiGenericAirPurifier): self._device_features = FEATURE_FLAGS_AIRFRESH_A1 self._preset_modes = PRESET_MODES_AIRFRESH_A1 self._attr_supported_features = ( - FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) self._state = self.coordinator.data.is_on @@ -831,6 +845,8 @@ class XiaomiAirFreshT2017(XiaomiAirFreshA1): class XiaomiGenericFan(XiaomiGenericDevice): """Representation of a generic Xiaomi Fan.""" + _attr_translation_key = "generic_fan" + def __init__(self, device, entry, unique_id, coordinator): """Initialize the fan.""" super().__init__(device, entry, unique_id, coordinator) @@ -851,6 +867,8 @@ class XiaomiGenericFan(XiaomiGenericDevice): FanEntityFeature.SET_SPEED | FanEntityFeature.OSCILLATE | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) if self._model != MODEL_FAN_1C: self._attr_supported_features |= FanEntityFeature.DIRECTION diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index bbd3f6607d7..2e5084a1f6c 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -1,4 +1,18 @@ { + "entity": { + "fan": { + "generic_fan": { + "state_attributes": { + "preset_mode": { + "state": { + "nature": "mdi:leaf", + "normal": "mdi:weather-windy" + } + } + } + } + } + }, "services": { 
"fan_reset_filter": "mdi:refresh", "fan_set_extra_features": "mdi:cog", diff --git a/homeassistant/components/xiaomi_miio/select.py b/homeassistant/components/xiaomi_miio/select.py index b785adef15a..a8e936aaf8f 100644 --- a/homeassistant/components/xiaomi_miio/select.py +++ b/homeassistant/components/xiaomi_miio/select.py @@ -50,6 +50,7 @@ from .const import ( MODEL_AIRPURIFIER_3H, MODEL_AIRPURIFIER_4, MODEL_AIRPURIFIER_4_PRO, + MODEL_AIRPURIFIER_COMPACT, MODEL_AIRPURIFIER_M1, MODEL_AIRPURIFIER_M2, MODEL_AIRPURIFIER_MA2, @@ -129,6 +130,9 @@ MODEL_TO_ATTR_MAP: dict[str, list] = { MODEL_AIRPURIFIER_4_PRO: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) ], + MODEL_AIRPURIFIER_COMPACT: [ + AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierMiotLedBrightness) + ], MODEL_AIRPURIFIER_M1: [ AttributeEnumMapping(ATTR_LED_BRIGHTNESS, AirpurifierLedBrightness) ], diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index a9588855818..bbdc3f5737d 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -93,6 +93,18 @@ "high": "High" } } + }, + "fan": { + "generic_fan": { + "state_attributes": { + "preset_mode": { + "state": { + "nature": "Nature", + "normal": "Normal" + } + } + } + } } }, "services": { @@ -210,7 +222,7 @@ }, "remote_learn_command": { "name": "Remote learn command", - "description": "Learns an IR command, press \"Call Service\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", + "description": "Learns an IR command, press \"Perform action\", point the remote at the IR device, and the learned command will be shown as a notification in Overview.", "fields": { "slot": { "name": "Slot", diff --git a/homeassistant/components/yamaha/const.py b/homeassistant/components/yamaha/const.py index c0f4e34dd50..492babe9657 100644 --- a/homeassistant/components/yamaha/const.py +++ b/homeassistant/components/yamaha/const.py @@ -1,6 +1,7 @@ """Constants for the Yamaha component.""" DOMAIN = "yamaha" +KNOWN_ZONES = "known_zones" CURSOR_TYPE_DOWN = "down" CURSOR_TYPE_LEFT = "left" CURSOR_TYPE_RETURN = "return" diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index 1be7cb03e17..a8200ea3373 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib import logging from typing import Any @@ -18,6 +19,7 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant +from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -29,6 +31,8 @@ from .const import ( CURSOR_TYPE_RIGHT, CURSOR_TYPE_SELECT, CURSOR_TYPE_UP, + DOMAIN, + KNOWN_ZONES, SERVICE_ENABLE_OUTPUT, SERVICE_MENU_CURSOR, SERVICE_SELECT_SCENE, @@ -55,7 +59,6 @@ CURSOR_TYPE_MAP = { CURSOR_TYPE_SELECT: rxv.RXV.menu_sel.__name__, CURSOR_TYPE_UP: rxv.RXV.menu_up.__name__, } -DATA_YAMAHA = "yamaha_known_receivers" DEFAULT_NAME = "Yamaha Receiver" SUPPORT_YAMAHA = ( @@ -99,6 +102,7 @@ class YamahaConfigInfo: self.zone_ignore = config.get(CONF_ZONE_IGNORE) 
self.zone_names = config.get(CONF_ZONE_NAMES) self.from_discovery = False + _LOGGER.debug("Discovery Info: %s", discovery_info) if discovery_info is not None: self.name = discovery_info.get("name") self.model = discovery_info.get("model_name") @@ -109,23 +113,38 @@ class YamahaConfigInfo: def _discovery(config_info): - """Discover receivers from configuration in the network.""" + """Discover list of zone controllers from configuration in the network.""" if config_info.from_discovery: - receivers = rxv.RXV( + _LOGGER.debug("Discovery Zones") + zones = rxv.RXV( config_info.ctrl_url, model_name=config_info.model, friendly_name=config_info.name, unit_desc_url=config_info.desc_url, ).zone_controllers() - _LOGGER.debug("Receivers: %s", receivers) elif config_info.host is None: - receivers = [] + _LOGGER.debug("Config No Host Supplied Zones") + zones = [] for recv in rxv.find(): - receivers.extend(recv.zone_controllers()) + zones.extend(recv.zone_controllers()) else: - receivers = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + _LOGGER.debug("Config Zones") + zones = None - return receivers + # Fix for upstream issues in rxv.find() with some hardware. + with contextlib.suppress(AttributeError): + for recv in rxv.find(): + if recv.ctrl_url == config_info.ctrl_url: + _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) + zones = recv.zone_controllers() + break + + if not zones: + _LOGGER.debug("Config Zones Fallback") + zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + + _LOGGER.debug("Returned _discover zones: %s", zones) + return zones async def async_setup_platform( @@ -138,21 +157,27 @@ async def async_setup_platform( # Keep track of configured receivers so that we don't end up # discovering a receiver dynamically that we have static config # for. Map each device from its zone_id . 
- known_zones = hass.data.setdefault(DATA_YAMAHA, set()) + known_zones = hass.data.setdefault(DOMAIN, {KNOWN_ZONES: set()})[KNOWN_ZONES] + _LOGGER.debug("Known receiver zones: %s", known_zones) # Get the Infos for configuration from config (YAML) or Discovery config_info = YamahaConfigInfo(config=config, discovery_info=discovery_info) # Async check if the Receivers are there in the network - receivers = await hass.async_add_executor_job(_discovery, config_info) + try: + zone_ctrls = await hass.async_add_executor_job(_discovery, config_info) + except requests.exceptions.ConnectionError as ex: + raise PlatformNotReady(f"Issue while connecting to {config_info.name}") from ex entities = [] - for receiver in receivers: - if config_info.zone_ignore and receiver.zone in config_info.zone_ignore: + for zctrl in zone_ctrls: + _LOGGER.debug("Receiver zone: %s", zctrl.zone) + if config_info.zone_ignore and zctrl.zone in config_info.zone_ignore: + _LOGGER.debug("Ignore receiver zone: %s %s", config_info.name, zctrl.zone) continue - entity = YamahaDevice( + entity = YamahaDeviceZone( config_info.name, - receiver, + zctrl, config_info.source_ignore, config_info.source_names, config_info.zone_names, @@ -163,7 +188,9 @@ async def async_setup_platform( known_zones.add(entity.zone_id) entities.append(entity) else: - _LOGGER.debug("Ignoring duplicate receiver: %s", config_info.name) + _LOGGER.debug( + "Ignoring duplicate zone: %s %s", config_info.name, zctrl.zone + ) async_add_entities(entities) @@ -184,16 +211,16 @@ async def async_setup_platform( platform.async_register_entity_service( SERVICE_MENU_CURSOR, {vol.Required(ATTR_CURSOR): vol.In(CURSOR_TYPE_MAP)}, - YamahaDevice.menu_cursor.__name__, + YamahaDeviceZone.menu_cursor.__name__, ) -class YamahaDevice(MediaPlayerEntity): - """Representation of a Yamaha device.""" +class YamahaDeviceZone(MediaPlayerEntity): + """Representation of a Yamaha device zone.""" - def __init__(self, name, receiver, source_ignore, source_names, zone_names): + def __init__(self, name, zctrl, source_ignore, source_names, zone_names): """Initialize the Yamaha Receiver.""" - self.receiver = receiver + self.zctrl = zctrl self._attr_is_volume_muted = False self._attr_volume_level = 0 self._attr_state = MediaPlayerState.OFF @@ -205,24 +232,37 @@ class YamahaDevice(MediaPlayerEntity): self._is_playback_supported = False self._play_status = None self._name = name - self._zone = receiver.zone - if self.receiver.serial_number is not None: + self._zone = zctrl.zone + if self.zctrl.serial_number is not None: # Since not all receivers will have a serial number and set a unique id # the default name of the integration may not be changed # to avoid a breaking change. 
- self._attr_unique_id = f"{self.receiver.serial_number}_{self._zone}" + self._attr_unique_id = f"{self.zctrl.serial_number}_{self._zone}" + _LOGGER.debug( + "Receiver zone: %s zone %s uid %s", + self._name, + self._zone, + self._attr_unique_id, + ) + else: + _LOGGER.info( + "Receiver zone: %s zone %s no uid %s", + self._name, + self._zone, + self._attr_unique_id, + ) def update(self) -> None: """Get the latest details from the device.""" try: - self._play_status = self.receiver.play_status() + self._play_status = self.zctrl.play_status() except requests.exceptions.ConnectionError: - _LOGGER.info("Receiver is offline: %s", self._name) + _LOGGER.debug("Receiver is offline: %s", self._name) self._attr_available = False return self._attr_available = True - if self.receiver.on: + if self.zctrl.on: if self._play_status is None: self._attr_state = MediaPlayerState.ON elif self._play_status.playing: @@ -232,21 +272,21 @@ class YamahaDevice(MediaPlayerEntity): else: self._attr_state = MediaPlayerState.OFF - self._attr_is_volume_muted = self.receiver.mute - self._attr_volume_level = (self.receiver.volume / 100) + 1 + self._attr_is_volume_muted = self.zctrl.mute + self._attr_volume_level = (self.zctrl.volume / 100) + 1 if self.source_list is None: self.build_source_list() - current_source = self.receiver.input + current_source = self.zctrl.input self._attr_source = self._source_names.get(current_source, current_source) - self._playback_support = self.receiver.get_playback_support() - self._is_playback_supported = self.receiver.is_playback_supported( + self._playback_support = self.zctrl.get_playback_support() + self._is_playback_supported = self.zctrl.is_playback_supported( self._attr_source ) - surround_programs = self.receiver.surround_programs() + surround_programs = self.zctrl.surround_programs() if surround_programs: - self._attr_sound_mode = self.receiver.surround_program + self._attr_sound_mode = self.zctrl.surround_program self._attr_sound_mode_list = surround_programs else: self._attr_sound_mode = None @@ -260,7 +300,7 @@ class YamahaDevice(MediaPlayerEntity): self._attr_source_list = sorted( self._source_names.get(source, source) - for source in self.receiver.inputs() + for source in self.zctrl.inputs() if source not in self._source_ignore ) @@ -277,7 +317,7 @@ class YamahaDevice(MediaPlayerEntity): @property def zone_id(self): """Return a zone_id to ensure 1 media player per zone.""" - return f"{self.receiver.ctrl_url}:{self._zone}" + return f"{self.zctrl.ctrl_url}:{self._zone}" @property def supported_features(self) -> MediaPlayerEntityFeature: @@ -301,42 +341,42 @@ class YamahaDevice(MediaPlayerEntity): def turn_off(self) -> None: """Turn off media player.""" - self.receiver.on = False + self.zctrl.on = False def set_volume_level(self, volume: float) -> None: """Set volume level, range 0..1.""" - receiver_vol = 100 - (volume * 100) - negative_receiver_vol = -receiver_vol - self.receiver.volume = negative_receiver_vol + zone_vol = 100 - (volume * 100) + negative_zone_vol = -zone_vol + self.zctrl.volume = negative_zone_vol def mute_volume(self, mute: bool) -> None: """Mute (true) or unmute (false) media player.""" - self.receiver.mute = mute + self.zctrl.mute = mute def turn_on(self) -> None: """Turn the media player on.""" - self.receiver.on = True - self._attr_volume_level = (self.receiver.volume / 100) + 1 + self.zctrl.on = True + self._attr_volume_level = (self.zctrl.volume / 100) + 1 def media_play(self) -> None: """Send play command.""" - 
self._call_playback_function(self.receiver.play, "play") + self._call_playback_function(self.zctrl.play, "play") def media_pause(self) -> None: """Send pause command.""" - self._call_playback_function(self.receiver.pause, "pause") + self._call_playback_function(self.zctrl.pause, "pause") def media_stop(self) -> None: """Send stop command.""" - self._call_playback_function(self.receiver.stop, "stop") + self._call_playback_function(self.zctrl.stop, "stop") def media_previous_track(self) -> None: """Send previous track command.""" - self._call_playback_function(self.receiver.previous, "previous track") + self._call_playback_function(self.zctrl.previous, "previous track") def media_next_track(self) -> None: """Send next track command.""" - self._call_playback_function(self.receiver.next, "next track") + self._call_playback_function(self.zctrl.next, "next track") def _call_playback_function(self, function, function_text): try: @@ -346,7 +386,7 @@ class YamahaDevice(MediaPlayerEntity): def select_source(self, source: str) -> None: """Select input source.""" - self.receiver.input = self._reverse_mapping.get(source, source) + self.zctrl.input = self._reverse_mapping.get(source, source) def play_media( self, media_type: MediaType | str, media_id: str, **kwargs: Any @@ -370,41 +410,43 @@ class YamahaDevice(MediaPlayerEntity): menu must be fetched by the receiver from the vtuner service. """ if media_type == "NET RADIO": - self.receiver.net_radio(media_id) + self.zctrl.net_radio(media_id) def enable_output(self, port, enabled): """Enable or disable an output port..""" - self.receiver.enable_output(port, enabled) + self.zctrl.enable_output(port, enabled) def menu_cursor(self, cursor): """Press a menu cursor button.""" - getattr(self.receiver, CURSOR_TYPE_MAP[cursor])() + getattr(self.zctrl, CURSOR_TYPE_MAP[cursor])() def set_scene(self, scene): """Set the current scene.""" try: - self.receiver.scene = scene + self.zctrl.scene = scene except AssertionError: _LOGGER.warning("Scene '%s' does not exist!", scene) def select_sound_mode(self, sound_mode: str) -> None: """Set Sound Mode for Receiver..""" - self.receiver.surround_program = sound_mode + self.zctrl.surround_program = sound_mode @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: return self._play_status.artist + return None @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album of current playing media.""" if self._play_status is not None: return self._play_status.album + return None @property - def media_content_type(self): + def media_content_type(self) -> MediaType | None: """Content type of current playing media.""" # Loose assumption that if playback is supported, we are playing music if self._is_playback_supported: @@ -412,7 +454,7 @@ class YamahaDevice(MediaPlayerEntity): return None @property - def media_title(self): + def media_title(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: song = self._play_status.song @@ -424,3 +466,4 @@ class YamahaDevice(MediaPlayerEntity): return f"{station}: {song}" return song or station + return None diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index 703f81d2823..1d1219d5a95 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,5 @@ "codeowners": 
["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", - "requirements": ["aioymaps==1.2.2"] + "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandex_transport/sensor.py b/homeassistant/components/yandex_transport/sensor.py index 30227e3261e..95c4785a341 100644 --- a/homeassistant/components/yandex_transport/sensor.py +++ b/homeassistant/components/yandex_transport/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta import logging -from aioymaps import CaptchaError, YandexMapsRequester +from aioymaps import CaptchaError, NoSessionError, YandexMapsRequester import voluptuous as vol from homeassistant.components.sensor import ( @@ -88,7 +88,7 @@ class DiscoverYandexTransport(SensorEntity): closer_time = None try: yandex_reply = await self.requester.get_stop_info(self._stop_id) - except CaptchaError as ex: + except (CaptchaError, NoSessionError) as ex: _LOGGER.error( "%s. You may need to disable the integration for some time", ex, diff --git a/homeassistant/components/yeelight/manifest.json b/homeassistant/components/yeelight/manifest.json index 4c63ab79baf..efb08e26b5a 100644 --- a/homeassistant/components/yeelight/manifest.json +++ b/homeassistant/components/yeelight/manifest.json @@ -17,7 +17,7 @@ "iot_class": "local_push", "loggers": ["async_upnp_client", "yeelight"], "quality_scale": "platinum", - "requirements": ["yeelight==0.7.14", "async-upnp-client==0.39.0"], + "requirements": ["yeelight==0.7.14", "async-upnp-client==0.40.0"], "zeroconf": [ { "type": "_miio._udp.local.", diff --git a/homeassistant/components/yolink/climate.py b/homeassistant/components/yolink/climate.py index 21e0a71ebcb..98f1b764498 100644 --- a/homeassistant/components/yolink/climate.py +++ b/homeassistant/components/yolink/climate.py @@ -77,6 +77,7 @@ class YoLinkClimateEntity(YoLinkEntity, ClimateEntity): self._attr_fan_modes = [FAN_ON, FAN_AUTO] self._attr_min_temp = -10 self._attr_max_temp = 50 + self._attr_hvac_mode = None self._attr_hvac_modes = [ HVACMode.COOL, HVACMode.HEAT, diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index e829fe08d32..686160d9248 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -17,3 +17,9 @@ YOLINK_OFFLINE_TIME = 32400 DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" +DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" +DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" +DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" +DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" +DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 5353d5d5b8c..ceb4e4ceff3 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.4"] + "requirements": ["yolink-api==0.4.6"] } diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 6badeefbdb3..77bbccb2f6a 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -48,7 +48,15 @@ from homeassistant.core import 
HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage -from .const import DOMAIN +from .const import ( + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DOMAIN, +) from .coordinator import YoLinkCoordinator from .entity import YoLinkEntity @@ -108,6 +116,15 @@ MCU_DEV_TEMPERATURE_SENSOR = [ ATTR_DEVICE_CO_SMOKE_SENSOR, ] +NONE_HUMIDITY_SENSOR_MODELS = [ + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, +] + def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -141,7 +158,8 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] = ( device_class=SensorDeviceClass.HUMIDITY, native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, - exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR], + exists_fn=lambda device: device.device_type in [ATTR_DEVICE_TH_SENSOR] + and device.device_model_name not in NONE_HUMIDITY_SENSOR_MODELS, ), YoLinkSensorEntityDescription( key="temperature", diff --git a/homeassistant/components/yolink/valve.py b/homeassistant/components/yolink/valve.py index a24ad7d385d..d8c199697c3 100644 --- a/homeassistant/components/yolink/valve.py +++ b/homeassistant/components/yolink/valve.py @@ -37,7 +37,7 @@ DEVICE_TYPES: tuple[YoLinkValveEntityDescription, ...] = ( key="valve_state", translation_key="meter_valve_state", device_class=ValveDeviceClass.WATER, - value=lambda value: value == "closed" if value is not None else None, + value=lambda value: value != "open" if value is not None else None, exists_fn=lambda device: device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER and not device.device_model_name.startswith(DEV_MODEL_WATER_METER_YS5007), diff --git a/homeassistant/components/zeversolar/diagnostics.py b/homeassistant/components/zeversolar/diagnostics.py index b8901a7e793..6e6ed262f51 100644 --- a/homeassistant/components/zeversolar/diagnostics.py +++ b/homeassistant/components/zeversolar/diagnostics.py @@ -31,6 +31,7 @@ async def async_get_config_entry_diagnostics( "num_inverters": data.num_inverters, "serial_number": data.serial_number, "pac": data.pac, + "energy_today": data.energy_today, "status": data.status.value, "meter_status": data.meter_status.value, } diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index ed74cde47e1..fc573b19ab1 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -1,17 +1,18 @@ """Support for Zigbee Home Automation devices.""" import contextlib -import copy import logging -import re import voluptuous as vol -from zhaquirks import setup as setup_quirks +from zha.application.const import BAUD_RATES, RadioType +from zha.application.gateway import Gateway +from zha.application.helpers import ZHAData +from zha.zigbee.device import get_device_automation_triggers from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP +from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP, 
Platform from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr @@ -20,9 +21,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType from . import repairs, websocket_api -from .core import ZHAGateway -from .core.const import ( - BAUD_RATES, +from .const import ( CONF_BAUDRATE, CONF_CUSTOM_QUIRKS_PATH, CONF_DEVICE_CONFIG, @@ -33,13 +32,14 @@ from .core.const import ( CONF_ZIGPY, DATA_ZHA, DOMAIN, - PLATFORMS, - SIGNAL_ADD_ENTITIES, - RadioType, ) -from .core.device import get_device_automation_triggers -from .core.discovery import GROUP_PROBE -from .core.helpers import ZHAData, get_zha_data +from .helpers import ( + SIGNAL_ADD_ENTITIES, + HAZHAData, + ZHAGatewayProxy, + create_zha_config, + get_zha_data, +) from .radio_manager import ZhaRadioManager from .repairs.network_settings_inconsistent import warn_on_inconsistent_network_settings from .repairs.wrong_silabs_firmware import ( @@ -74,6 +74,25 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) +PLATFORMS = ( + Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.COVER, + Platform.DEVICE_TRACKER, + Platform.FAN, + Platform.LIGHT, + Platform.LOCK, + Platform.NUMBER, + Platform.SELECT, + Platform.SENSOR, + Platform.SIREN, + Platform.SWITCH, + Platform.UPDATE, +) + + # Zigbee definitions CENTICELSIUS = "C-100" @@ -83,49 +102,22 @@ _LOGGER = logging.getLogger(__name__) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up ZHA from config.""" - zha_data = ZHAData() - zha_data.yaml_config = config.get(DOMAIN, {}) - hass.data[DATA_ZHA] = zha_data + ha_zha_data = HAZHAData(yaml_config=config.get(DOMAIN, {})) + hass.data[DATA_ZHA] = ha_zha_data return True -def _clean_serial_port_path(path: str) -> str: - """Clean the serial port path, applying corrections where necessary.""" - - if path.startswith("socket://"): - path = path.strip() - - # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) - if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): - path = path.replace("[", "").replace("]", "") - - return path - - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up ZHA. Will automatically load components to support devices found on the network. 
""" + ha_zha_data: HAZHAData = get_zha_data(hass) + ha_zha_data.config_entry = config_entry + zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data) - # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 - # This will be removed in 2023.11.0 - path = config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] - cleaned_path = _clean_serial_port_path(path) - data = copy.deepcopy(dict(config_entry.data)) - - if path != cleaned_path: - _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) - data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path - hass.config_entries.async_update_entry(config_entry, data=data) - - zha_data = get_zha_data(hass) - - if zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True): - await hass.async_add_import_executor_job( - setup_quirks, zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH) - ) + zha_gateway = await Gateway.async_from_config(zha_lib_data) # Load and cache device trigger information early device_registry = dr.async_get(hass) @@ -141,19 +133,16 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if dev_entry is None: continue - zha_data.device_trigger_cache[dev_entry.id] = ( + zha_lib_data.device_trigger_cache[dev_entry.id] = ( str(dev.ieee), get_device_automation_triggers(dev), ) + ha_zha_data.device_trigger_cache = zha_lib_data.device_trigger_cache - _LOGGER.debug("Trigger cache: %s", zha_data.device_trigger_cache) + _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache) try: - zha_gateway = await ZHAGateway.async_from_config( - hass=hass, - config=zha_data.yaml_config, - config_entry=config_entry, - ) + await zha_gateway.async_initialize() except NetworkSettingsInconsistent as exc: await warn_on_inconsistent_network_settings( hass, @@ -185,6 +174,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b repairs.async_delete_blocking_issues(hass) + ha_zha_data.gateway_proxy = ZHAGatewayProxy(hass, config_entry, zha_gateway) + manufacturer = zha_gateway.state.node_info.manufacturer model = zha_gateway.state.node_info.model @@ -205,13 +196,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b websocket_api.async_load_api(hass) async def async_shutdown(_: Event) -> None: - await zha_gateway.shutdown() + """Handle shutdown tasks.""" + assert ha_zha_data.gateway_proxy is not None + await ha_zha_data.gateway_proxy.shutdown() config_entry.async_on_unload( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown) ) - await zha_gateway.async_initialize_devices_and_entities() + await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities() await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES) return True @@ -219,11 +212,12 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload ZHA config entry.""" - zha_data = get_zha_data(hass) + ha_zha_data = get_zha_data(hass) + ha_zha_data.config_entry = None - if zha_data.gateway is not None: - await zha_data.gateway.shutdown() - zha_data.gateway = None + if ha_zha_data.gateway_proxy is not None: + await ha_zha_data.gateway_proxy.shutdown() + ha_zha_data.gateway_proxy = None # clean up any remaining entity metadata # (entities that have been discovered but not yet added to HA) @@ -231,15 +225,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> 
# be in when we get here in failure cases with contextlib.suppress(KeyError): for platform in PLATFORMS: - del zha_data.platforms[platform] + del ha_zha_data.platforms[platform] - GROUP_PROBE.cleanup() websocket_api.async_unload_api(hass) - # our components don't have unload methods so no need to look at return values - await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) - - return True + return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/zha/alarm_control_panel.py b/homeassistant/components/zha/alarm_control_panel.py index 7750e7f280d..c54d7c7ab2d 100644 --- a/homeassistant/components/zha/alarm_control_panel.py +++ b/homeassistant/components/zha/alarm_control_panel.py @@ -3,9 +3,6 @@ from __future__ import annotations import functools -from typing import TYPE_CHECKING - -from zigpy.zcl.clusters.security import IasAce from homeassistant.components.alarm_control_panel import ( AlarmControlPanelEntity, @@ -13,50 +10,18 @@ from homeassistant.components.alarm_control_panel import ( CodeFormat, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_ALARM_ARMED_AWAY, - STATE_ALARM_ARMED_HOME, - STATE_ALARM_ARMED_NIGHT, - STATE_ALARM_DISARMED, - STATE_ALARM_TRIGGERED, - Platform, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.cluster_handlers.security import ( - SIGNAL_ALARM_TRIGGERED, - SIGNAL_ARMED_STATE_CHANGED, - IasAceClusterHandler, -) -from .core.const import ( - CLUSTER_HANDLER_IAS_ACE, - CONF_ALARM_ARM_REQUIRES_CODE, - CONF_ALARM_FAILED_TRIES, - CONF_ALARM_MASTER_CODE, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - ZHA_ALARM_OPTIONS, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import async_get_zha_config_value, get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.device import ZHADevice - -STRICT_MATCH = functools.partial( - ZHA_ENTITIES.strict_match, Platform.ALARM_CONTROL_PANEL -) - -IAS_ACE_STATE_MAP = { - IasAce.PanelStatus.Panel_Disarmed: STATE_ALARM_DISARMED, - IasAce.PanelStatus.Armed_Stay: STATE_ALARM_ARMED_HOME, - IasAce.PanelStatus.Armed_Night: STATE_ALARM_ARMED_NIGHT, - IasAce.PanelStatus.Armed_Away: STATE_ALARM_ARMED_AWAY, - IasAce.PanelStatus.In_Alarm: STATE_ALARM_TRIGGERED, -} async def async_setup_entry( @@ -72,14 +37,16 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, + async_add_entities, + ZHAAlarmControlPanel, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_ACE) -class ZHAAlarmControlPanel(ZhaEntity, AlarmControlPanelEntity): +class ZHAAlarmControlPanel(ZHAEntity, AlarmControlPanelEntity): """Entity for ZHA alarm control devices.""" _attr_translation_key: str = "alarm_control_panel" @@ -91,68 +58,42 @@ class ZHAAlarmControlPanel(ZhaEntity, AlarmControlPanelEntity): | AlarmControlPanelEntityFeature.TRIGGER ) - def 
__init__( - self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs - ) -> None: - """Initialize the ZHA alarm control device.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - cfg_entry = zha_device.gateway.config_entry - self._cluster_handler: IasAceClusterHandler = cluster_handlers[0] - self._cluster_handler.panel_code = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_MASTER_CODE, "1234" - ) - self._cluster_handler.code_required_arm_actions = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_ARM_REQUIRES_CODE, False - ) - self._cluster_handler.max_invalid_tries = async_get_zha_config_value( - cfg_entry, ZHA_ALARM_OPTIONS, CONF_ALARM_FAILED_TRIES, 3 - ) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ARMED_STATE_CHANGED, self.async_set_armed_mode - ) - self.async_accept_signal( - self._cluster_handler, SIGNAL_ALARM_TRIGGERED, self.async_alarm_trigger - ) - - @callback - def async_set_armed_mode(self) -> None: - """Set the entity state.""" - self.async_write_ha_state() - @property def code_arm_required(self) -> bool: """Whether the code is required for arm actions.""" - return self._cluster_handler.code_required_arm_actions + return self.entity_data.entity.code_arm_required + @convert_zha_error_to_ha_error async def async_alarm_disarm(self, code: str | None = None) -> None: """Send disarm command.""" - self._cluster_handler.arm(IasAce.ArmMode.Disarm, code, 0) + await self.entity_data.entity.async_alarm_disarm(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_home(self, code: str | None = None) -> None: """Send arm home command.""" - self._cluster_handler.arm(IasAce.ArmMode.Arm_Day_Home_Only, code, 0) + await self.entity_data.entity.async_alarm_arm_home(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_away(self, code: str | None = None) -> None: """Send arm away command.""" - self._cluster_handler.arm(IasAce.ArmMode.Arm_All_Zones, code, 0) + await self.entity_data.entity.async_alarm_arm_away(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_arm_night(self, code: str | None = None) -> None: """Send arm night command.""" - self._cluster_handler.arm(IasAce.ArmMode.Arm_Night_Sleep_Only, code, 0) + await self.entity_data.entity.async_alarm_arm_night(code) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_alarm_trigger(self, code: str | None = None) -> None: """Send alarm trigger command.""" + await self.entity_data.entity.async_alarm_trigger(code) self.async_write_ha_state() @property def state(self) -> str | None: """Return the state of the entity.""" - return IAS_ACE_STATE_MAP.get(self._cluster_handler.armed_state) + return self.entity_data.entity.state["state"] diff --git a/homeassistant/components/zha/api.py b/homeassistant/components/zha/api.py index db0658eb632..60960a3e9fc 100644 --- a/homeassistant/components/zha/api.py +++ b/homeassistant/components/zha/api.py @@ -4,13 +4,14 @@ from __future__ import annotations from typing import TYPE_CHECKING, Literal +from zha.application.const import RadioType from zigpy.backups import NetworkBackup from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.types import Channels from zigpy.util import pick_optimal_channel -from .core.const import CONF_RADIO_TYPE, DOMAIN, 
RadioType -from .core.helpers import get_zha_gateway +from .const import CONF_RADIO_TYPE, DOMAIN +from .helpers import get_zha_data, get_zha_gateway from .radio_manager import ZhaRadioManager if TYPE_CHECKING: @@ -22,14 +23,12 @@ def _get_config_entry(hass: HomeAssistant) -> ConfigEntry: """Find the singleton ZHA config entry, if one exists.""" # If ZHA is already running, use its config entry - try: - zha_gateway = get_zha_gateway(hass) - except ValueError: - pass - else: - return zha_gateway.config_entry + zha_data = get_zha_data(hass) - # Otherwise, find one + if zha_data.config_entry is not None: + return zha_data.config_entry + + # Otherwise, find an inactive one entries = hass.config_entries.async_entries(DOMAIN) if len(entries) != 1: diff --git a/homeassistant/components/zha/backup.py b/homeassistant/components/zha/backup.py index e31ae09eeb6..a3d9090eaba 100644 --- a/homeassistant/components/zha/backup.py +++ b/homeassistant/components/zha/backup.py @@ -4,7 +4,7 @@ import logging from homeassistant.core import HomeAssistant -from .core.helpers import get_zha_gateway +from .helpers import get_zha_gateway _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/binary_sensor.py b/homeassistant/components/zha/binary_sensor.py index bdd2fd03ca0..f45ebf0c5a5 100644 --- a/homeassistant/components/zha/binary_sensor.py +++ b/homeassistant/components/zha/binary_sensor.py @@ -3,58 +3,24 @@ from __future__ import annotations import functools -import logging - -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy.quirks.v2 import BinarySensorMetadata -import zigpy.types as t -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.clusters.security import IasZone from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON, EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ACCELEROMETER, - CLUSTER_HANDLER_BINARY_INPUT, - CLUSTER_HANDLER_HUE_OCCUPANCY, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - CLUSTER_HANDLER_ZONE, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -# Zigbee Cluster Library Zone Type to Home Assistant device class -IAS_ZONE_CLASS_MAPPING = { - IasZone.ZoneType.Motion_Sensor: BinarySensorDeviceClass.MOTION, - IasZone.ZoneType.Contact_Switch: BinarySensorDeviceClass.OPENING, - IasZone.ZoneType.Fire_Sensor: BinarySensorDeviceClass.SMOKE, - IasZone.ZoneType.Water_Sensor: BinarySensorDeviceClass.MOISTURE, - IasZone.ZoneType.Carbon_Monoxide_Sensor: BinarySensorDeviceClass.GAS, - IasZone.ZoneType.Vibration_Movement_Sensor: BinarySensorDeviceClass.VIBRATION, -} - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.BINARY_SENSOR) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BINARY_SENSOR) -CONFIG_DIAGNOSTIC_MATCH = 
functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.BINARY_SENSOR -) - -_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -70,312 +36,24 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, BinarySensor, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BinarySensor(ZhaEntity, BinarySensorEntity): +class BinarySensor(ZHAEntity, BinarySensorEntity): """ZHA BinarySensor.""" - _attribute_name: str - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: + def __init__(self, entity_data: EntityData) -> None: """Initialize the ZHA binary sensor.""" - self._cluster_handler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: BinarySensorMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.device_class is not None: - self._attr_device_class = validate_device_class( - BinarySensorDeviceClass, - entity_metadata.device_class, - Platform.BINARY_SENSOR.value, - _LOGGER, + super().__init__(entity_data) + if self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = BinarySensorDeviceClass( + self.entity_data.entity.info_object.device_class ) - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - @property def is_on(self) -> bool: """Return True if the switch is on based on the state machine.""" - raw_state = self._cluster_handler.cluster.get(self._attribute_name) - if raw_state is None: - return False - return self.parse(raw_state) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Set the state.""" - self.async_write_ha_state() - - @staticmethod - def parse(value: bool | int) -> bool: - """Parse the raw attribute into a bool state.""" - return bool(value) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ACCELEROMETER) -class Accelerometer(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "acceleration" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOVING - _attr_translation_key: str = "accelerometer" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY) -class Occupancy(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "occupancy" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY) -class HueOccupancy(Occupancy): - """ZHA Hue occupancy.""" - - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OCCUPANCY - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class Opening(BinarySensor): - """ZHA OnOff BinarySensor.""" - - _attribute_name = "on_off" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING - - # Client/out cluster attributes aren't stored in the zigpy database, but are properly stored in the runtime cache. 
- # We need to manually restore the last state from the sensor state to the runtime cache for now. - @callback - def async_restore_last_state(self, last_state): - """Restore previous state to zigpy cache.""" - self._cluster_handler.cluster.update_attribute( - OnOff.attributes_by_name[self._attribute_name].id, - t.Bool.true if last_state.state == STATE_ON else t.Bool.false, - ) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BINARY_INPUT) -class BinaryInput(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "present_value" - _attr_translation_key: str = "binary_input" - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - manufacturers="IKEA of Sweden", - models=lambda model: isinstance(model, str) - and model is not None - and model.find("motion") != -1, -) -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - manufacturers="Philips", - models={"SML001", "SML002"}, -) -class Motion(Opening): - """ZHA OnOff BinarySensor with motion device class.""" - - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.MOTION - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE) -class IASZone(BinarySensor): - """ZHA IAS BinarySensor.""" - - _attribute_name = "zone_status" - - @property - def translation_key(self) -> str | None: - """Return the name of the sensor.""" - zone_type = self._cluster_handler.cluster.get("zone_type") - if zone_type in IAS_ZONE_CLASS_MAPPING: - return None - return "ias_zone" - - @property - def device_class(self) -> BinarySensorDeviceClass | None: - """Return device class from component DEVICE_CLASSES.""" - zone_type = self._cluster_handler.cluster.get("zone_type") - return IAS_ZONE_CLASS_MAPPING.get(zone_type) - - @staticmethod - def parse(value: bool | int) -> bool: - """Parse the raw attribute into a bool state.""" - return BinarySensor.parse(value & 3) # use only bit 0 and 1 for alarm state - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE, models={"WL4200", "WL4200S"}) -class SinopeLeakStatus(BinarySensor): - """Sinope water leak sensor.""" - - _attribute_name = "leak_status" - _attr_device_class = BinarySensorDeviceClass.MOISTURE - - -@MULTI_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -class FrostLock(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "frost_lock" - _unique_id_suffix = "frost_lock" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.LOCK - _attr_translation_key: str = "frost_lock" - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -class ReplaceFilter(BinarySensor): - """ZHA BinarySensor.""" - - _attribute_name = "replace_filter" - _unique_id_suffix = "replace_filter" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "replace_filter" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -class AqaraPetFeederErrorDetected(BinarySensor): - """ZHA aqara pet feeder error detected binary sensor.""" - - _attribute_name = "error_detected" - _unique_id_suffix = "error_detected" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - - -@MULTI_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, -) -class XiaomiPlugConsumerConnected(BinarySensor): - """ZHA Xiaomi plug consumer connected binary sensor.""" - - _attribute_name = "consumer_connected" - 
_unique_id_suffix = "consumer_connected" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PLUG - _attr_translation_key: str = "consumer_connected" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"}) -class AqaraThermostatWindowOpen(BinarySensor): - """ZHA Aqara thermostat window open binary sensor.""" - - _attribute_name = "window_open" - _unique_id_suffix = "window_open" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.WINDOW - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"}) -class AqaraThermostatValveAlarm(BinarySensor): - """ZHA Aqara thermostat valve alarm binary sensor.""" - - _attribute_name = "valve_alarm" - _unique_id_suffix = "valve_alarm" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.PROBLEM - _attr_translation_key: str = "valve_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatCalibrated(BinarySensor): - """ZHA Aqara thermostat calibrated binary sensor.""" - - _attribute_name = "calibrated" - _unique_id_suffix = "calibrated" - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "calibrated" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatExternalSensor(BinarySensor): - """ZHA Aqara thermostat external sensor binary sensor.""" - - _attribute_name = "sensor" - _unique_id_suffix = "sensor" - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - _attr_translation_key: str = "external_sensor" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"}) -class AqaraLinkageAlarmState(BinarySensor): - """ZHA Aqara linkage alarm state binary sensor.""" - - _attribute_name = "linkage_alarm_state" - _unique_id_suffix = "linkage_alarm_state" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.SMOKE - _attr_translation_key: str = "linkage_alarm_state" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -class AqaraE1CurtainMotorOpenedByHandBinarySensor(BinarySensor): - """Opened by hand binary sensor.""" - - _unique_id_suffix = "hand_open" - _attribute_name = "hand_open" - _attr_translation_key = "hand_open" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossMountingModeActive(BinarySensor): - """Danfoss TRV proprietary attribute exposing whether in mounting mode.""" - - _unique_id_suffix = "mounting_mode_active" - _attribute_name = "mounting_mode_active" - _attr_translation_key: str = "mounting_mode_active" - _attr_device_class: BinarySensorDeviceClass = BinarySensorDeviceClass.OPENING - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossHeatRequired(BinarySensor): - """Danfoss TRV proprietary attribute exposing whether heat is required.""" - - _unique_id_suffix = "heat_required" - _attribute_name = "heat_required" - _attr_translation_key: str = "heat_required" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossPreheatStatus(BinarySensor): - """Danfoss TRV proprietary attribute 
exposing whether in pre-heating mode."""
-
-    _unique_id_suffix = "preheat_status"
-    _attribute_name = "preheat_status"
-    _attr_translation_key: str = "preheat_status"
-    _attr_entity_registry_enabled_default = False
-    _attr_entity_category = EntityCategory.DIAGNOSTIC
+        return self.entity_data.entity.is_on
diff --git a/homeassistant/components/zha/button.py b/homeassistant/components/zha/button.py
index 33102062443..ecd5cd51f61 100644
--- a/homeassistant/components/zha/button.py
+++ b/homeassistant/components/zha/button.py
@@ -4,33 +4,22 @@ from __future__ import annotations
 
 import functools
 import logging
-from typing import TYPE_CHECKING, Any, Self
-
-from zigpy.quirks.v2 import WriteAttributeButtonMetadata, ZCLCommandButtonMetadata
 
 from homeassistant.components.button import ButtonDeviceClass, ButtonEntity
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import EntityCategory, Platform
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from .core import discovery
-from .core.const import CLUSTER_HANDLER_IDENTIFY, ENTITY_METADATA, SIGNAL_ADD_ENTITIES
-from .core.helpers import get_zha_data
-from .core.registries import ZHA_ENTITIES
-from .entity import ZhaEntity
-
-if TYPE_CHECKING:
-    from .core.cluster_handlers import ClusterHandler
-    from .core.device import ZHADevice
-
-
-MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.BUTTON)
-CONFIG_DIAGNOSTIC_MATCH = functools.partial(
-    ZHA_ENTITIES.config_diagnostic_match, Platform.BUTTON
+from .entity import ZHAEntity
+from .helpers import (
+    SIGNAL_ADD_ENTITIES,
+    EntityData,
+    async_add_entities as zha_async_add_entities,
+    convert_zha_error_to_ha_error,
+    get_zha_data,
 )
-DEFAULT_DURATION = 5  # seconds
 
 _LOGGER = logging.getLogger(__name__)
 
 
@@ -48,172 +37,24 @@ async def async_setup_entry(
         hass,
         SIGNAL_ADD_ENTITIES,
         functools.partial(
-            discovery.async_add_entities,
-            async_add_entities,
-            entities_to_create,
+            zha_async_add_entities, async_add_entities, ZHAButton, entities_to_create
         ),
     )
     config_entry.async_on_unload(unsub)
 
 
-class ZHAButton(ZhaEntity, ButtonEntity):
+class ZHAButton(ZHAEntity, ButtonEntity):
     """Defines a ZHA button."""
 
-    _command_name: str
-    _args: list[Any]
-    _kwargs: dict[str, Any]
-
-    def __init__(
-        self,
-        unique_id: str,
-        zha_device: ZHADevice,
-        cluster_handlers: list[ClusterHandler],
-        **kwargs: Any,
-    ) -> None:
-        """Init this button."""
-        self._cluster_handler: ClusterHandler = cluster_handlers[0]
-        if ENTITY_METADATA in kwargs:
-            self._init_from_quirks_metadata(kwargs[ENTITY_METADATA])
-        super().__init__(unique_id, zha_device, cluster_handlers, **kwargs)
-
-    def _init_from_quirks_metadata(
-        self, entity_metadata: ZCLCommandButtonMetadata
-    ) -> None:
-        """Init this entity from the quirks metadata."""
-        super()._init_from_quirks_metadata(entity_metadata)
-        self._command_name = entity_metadata.command_name
-        self._args = entity_metadata.args
-        self._kwargs = entity_metadata.kwargs
-
-    def get_args(self) -> list[Any]:
-        """Return the arguments to use in the command."""
-        return list(self._args) if self._args else []
-
-    def get_kwargs(self) -> dict[str, Any]:
-        """Return the keyword arguments to use in the command."""
-        return self._kwargs
+    def __init__(self, entity_data: EntityData) -> None:
+        """Initialize the ZHA button."""
+        super().__init__(entity_data)
+        if
self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = ButtonDeviceClass( + self.entity_data.entity.info_object.device_class + ) + @convert_zha_error_to_ha_error async def async_press(self) -> None: """Send out a update command.""" - command = getattr(self._cluster_handler, self._command_name) - arguments = self.get_args() or [] - kwargs = self.get_kwargs() or {} - await command(*arguments, **kwargs) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IDENTIFY) -class ZHAIdentifyButton(ZHAButton): - """Defines a ZHA identify button.""" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - if ZHA_ENTITIES.prevent_entity_creation( - Platform.BUTTON, zha_device.ieee, CLUSTER_HANDLER_IDENTIFY - ): - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - _attr_device_class = ButtonDeviceClass.IDENTIFY - _attr_entity_category = EntityCategory.DIAGNOSTIC - _command_name = "identify" - _kwargs = {} - _args = [DEFAULT_DURATION] - - -class ZHAAttributeButton(ZhaEntity, ButtonEntity): - """Defines a ZHA button, which writes a value to an attribute.""" - - _attribute_name: str - _attribute_value: Any = None - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this button.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata( - self, entity_metadata: WriteAttributeButtonMetadata - ) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - self._attribute_value = entity_metadata.attribute_value - - async def async_press(self) -> None: - """Write attribute with defined value.""" - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._attribute_value} - ) - self.async_write_ha_state() - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -class FrostLockResetButton(ZHAAttributeButton): - """Defines a ZHA frost lock reset button.""" - - _unique_id_suffix = "reset_frost_lock" - _attribute_name = "frost_lock_reset" - _attribute_value = 0 - _attr_device_class = ButtonDeviceClass.RESTART - _attr_entity_category = EntityCategory.CONFIG - _attr_translation_key = "reset_frost_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} -) -class NoPresenceStatusResetButton(ZHAAttributeButton): - """Defines a ZHA no presence status reset button.""" - - _unique_id_suffix = "reset_no_presence_status" - _attribute_name = "reset_no_presence_status" - _attribute_value = 1 - _attr_device_class = ButtonDeviceClass.RESTART - _attr_entity_category = EntityCategory.CONFIG - _attr_translation_key = "reset_no_presence_status" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -class AqaraPetFeederFeedButton(ZHAAttributeButton): - """Defines a feed button for the aqara c1 pet feeder.""" - - _unique_id_suffix = "feeding" - _attribute_name = "feeding" - 
_attribute_value = 1 - _attr_translation_key = "feed" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraSelfTestButton(ZHAAttributeButton): - """Defines a ZHA self-test button for Aqara smoke sensors.""" - - _unique_id_suffix = "self_test" - _attribute_name = "self_test" - _attribute_value = 1 - _attr_entity_category = EntityCategory.CONFIG - _attr_translation_key = "self_test" + await self.entity_data.entity.async_press() diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index 61c5f28ca8f..f4fb58c254a 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -6,109 +6,62 @@ at https://home-assistant.io/components/zha.climate/ from __future__ import annotations -from datetime import datetime, timedelta +from collections.abc import Mapping import functools -from random import randint from typing import Any -from zigpy.zcl.clusters.hvac import Fan as F, Thermostat as T +from zha.application.platforms.climate.const import ( + ClimateEntityFeature as ZHAClimateEntityFeature, + HVACAction as ZHAHVACAction, + HVACMode as ZHAHVACMode, +) from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - FAN_AUTO, - FAN_ON, - PRESET_AWAY, - PRESET_BOOST, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, + ATTR_TEMPERATURE, ClimateEntity, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - PRECISION_TENTHS, - Platform, - UnitOfTemperature, -) +from homeassistant.const import PRECISION_TENTHS, Platform, UnitOfTemperature from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_time_interval -import homeassistant.util.dt as dt_util -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_FAN, - CLUSTER_HANDLER_THERMOSTAT, - PRESET_COMPLEX, - PRESET_SCHEDULE, - PRESET_TEMP_MANUAL, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + exclude_none_values, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -ATTR_SYS_MODE = "system_mode" -ATTR_RUNNING_MODE = "running_mode" -ATTR_SETPT_CHANGE_SRC = "setpoint_change_source" -ATTR_SETPT_CHANGE_AMT = "setpoint_change_amount" -ATTR_OCCUPANCY = "occupancy" -ATTR_PI_COOLING_DEMAND = "pi_cooling_demand" -ATTR_PI_HEATING_DEMAND = "pi_heating_demand" -ATTR_OCCP_COOL_SETPT = "occupied_cooling_setpoint" -ATTR_OCCP_HEAT_SETPT = "occupied_heating_setpoint" -ATTR_UNOCCP_HEAT_SETPT = "unoccupied_heating_setpoint" -ATTR_UNOCCP_COOL_SETPT = "unoccupied_cooling_setpoint" - - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.CLIMATE) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.CLIMATE) -RUNNING_MODE = {0x00: HVACMode.OFF, 0x03: HVACMode.COOL, 0x04: HVACMode.HEAT} - -SEQ_OF_OPERATION = { - 0x00: [HVACMode.OFF, HVACMode.COOL], # cooling only - 0x01: [HVACMode.OFF, HVACMode.COOL], # cooling with reheat - 0x02: [HVACMode.OFF, HVACMode.HEAT], # heating only - 0x03: [HVACMode.OFF, HVACMode.HEAT], # heating with 
reheat - # cooling and heating 4-pipes - 0x04: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], - # cooling and heating 4-pipes - 0x05: [HVACMode.OFF, HVACMode.HEAT_COOL, HVACMode.COOL, HVACMode.HEAT], - 0x06: [HVACMode.COOL, HVACMode.HEAT, HVACMode.OFF], # centralite specific - 0x07: [HVACMode.HEAT_COOL, HVACMode.OFF], # centralite specific +ZHA_TO_HA_HVAC_MODE = { + ZHAHVACMode.OFF: HVACMode.OFF, + ZHAHVACMode.AUTO: HVACMode.AUTO, + ZHAHVACMode.HEAT: HVACMode.HEAT, + ZHAHVACMode.COOL: HVACMode.COOL, + ZHAHVACMode.HEAT_COOL: HVACMode.HEAT_COOL, + ZHAHVACMode.DRY: HVACMode.DRY, + ZHAHVACMode.FAN_ONLY: HVACMode.FAN_ONLY, } -HVAC_MODE_2_SYSTEM = { - HVACMode.OFF: T.SystemMode.Off, - HVACMode.HEAT_COOL: T.SystemMode.Auto, - HVACMode.COOL: T.SystemMode.Cool, - HVACMode.HEAT: T.SystemMode.Heat, - HVACMode.FAN_ONLY: T.SystemMode.Fan_only, - HVACMode.DRY: T.SystemMode.Dry, +ZHA_TO_HA_HVAC_ACTION = { + ZHAHVACAction.OFF: HVACAction.OFF, + ZHAHVACAction.HEATING: HVACAction.HEATING, + ZHAHVACAction.COOLING: HVACAction.COOLING, + ZHAHVACAction.DRYING: HVACAction.DRYING, + ZHAHVACAction.IDLE: HVACAction.IDLE, + ZHAHVACAction.FAN: HVACAction.FAN, + ZHAHVACAction.PREHEATING: HVACAction.PREHEATING, } -SYSTEM_MODE_2_HVAC = { - T.SystemMode.Off: HVACMode.OFF, - T.SystemMode.Auto: HVACMode.HEAT_COOL, - T.SystemMode.Cool: HVACMode.COOL, - T.SystemMode.Heat: HVACMode.HEAT, - T.SystemMode.Emergency_Heating: HVACMode.HEAT, - T.SystemMode.Pre_cooling: HVACMode.COOL, # this is 'precooling'. is it the same? - T.SystemMode.Fan_only: HVACMode.FAN_ONLY, - T.SystemMode.Dry: HVACMode.DRY, - T.SystemMode.Sleep: HVACMode.OFF, -} - -ZCL_TEMP = 100 - async def async_setup_entry( hass: HomeAssistant, @@ -118,708 +71,168 @@ async def async_setup_entry( """Set up the Zigbee Home Automation sensor from config entry.""" zha_data = get_zha_data(hass) entities_to_create = zha_data.platforms[Platform.CLIMATE] + unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Thermostat, entities_to_create ), ) config_entry.async_on_unload(unsub) -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - aux_cluster_handlers=CLUSTER_HANDLER_FAN, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class Thermostat(ZhaEntity, ClimateEntity): +class Thermostat(ZHAEntity, ClimateEntity): """Representation of a ZHA Thermostat device.""" - DEFAULT_MAX_TEMP = 35 - DEFAULT_MIN_TEMP = 7 - _attr_precision = PRECISION_TENTHS _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_translation_key: str = "thermostat" _enable_turn_on_off_backwards_compatibility = False - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._thrm = self.cluster_handlers.get(CLUSTER_HANDLER_THERMOSTAT) - self._preset = PRESET_NONE - self._presets = [] - self._supported_flags = ( - ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA thermostat entity.""" + super().__init__(entity_data, **kwargs) + self._attr_hvac_modes = [ + ZHA_TO_HA_HVAC_MODE[mode] for mode in self.entity_data.entity.hvac_modes + ] + self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get( + self.entity_data.entity.hvac_mode ) - self._fan = 
self.cluster_handlers.get(CLUSTER_HANDLER_FAN) + self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get( + self.entity_data.entity.hvac_action + ) + + features: ClimateEntityFeature = ClimateEntityFeature(0) + zha_features: ZHAClimateEntityFeature = ( + self.entity_data.entity.supported_features + ) + + if ZHAClimateEntityFeature.TARGET_TEMPERATURE in zha_features: + features |= ClimateEntityFeature.TARGET_TEMPERATURE + if ZHAClimateEntityFeature.TARGET_TEMPERATURE_RANGE in zha_features: + features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + if ZHAClimateEntityFeature.TARGET_HUMIDITY in zha_features: + features |= ClimateEntityFeature.TARGET_HUMIDITY + if ZHAClimateEntityFeature.PRESET_MODE in zha_features: + features |= ClimateEntityFeature.PRESET_MODE + if ZHAClimateEntityFeature.FAN_MODE in zha_features: + features |= ClimateEntityFeature.FAN_MODE + if ZHAClimateEntityFeature.SWING_MODE in zha_features: + features |= ClimateEntityFeature.SWING_MODE + if ZHAClimateEntityFeature.AUX_HEAT in zha_features: + features |= ClimateEntityFeature.AUX_HEAT + if ZHAClimateEntityFeature.TURN_OFF in zha_features: + features |= ClimateEntityFeature.TURN_OFF + if ZHAClimateEntityFeature.TURN_ON in zha_features: + features |= ClimateEntityFeature.TURN_ON + + self._attr_supported_features = features @property - def current_temperature(self): + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state + + return exclude_none_values( + { + "occupancy": state.get("occupancy"), + "occupied_cooling_setpoint": state.get("occupied_cooling_setpoint"), + "occupied_heating_setpoint": state.get("occupied_heating_setpoint"), + "pi_cooling_demand": state.get("pi_cooling_demand"), + "pi_heating_demand": state.get("pi_heating_demand"), + "system_mode": state.get("system_mode"), + "unoccupied_cooling_setpoint": state.get("unoccupied_cooling_setpoint"), + "unoccupied_heating_setpoint": state.get("unoccupied_heating_setpoint"), + } + ) + + @property + def current_temperature(self) -> float | None: """Return the current temperature.""" - if self._thrm.local_temperature is None: - return None - return self._thrm.local_temperature / ZCL_TEMP - - @property - def extra_state_attributes(self): - """Return device specific state attributes.""" - data = {} - if self.hvac_mode: - mode = SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode, "unknown") - data[ATTR_SYS_MODE] = f"[{self._thrm.system_mode}]/{mode}" - if self._thrm.occupancy is not None: - data[ATTR_OCCUPANCY] = self._thrm.occupancy - if self._thrm.occupied_cooling_setpoint is not None: - data[ATTR_OCCP_COOL_SETPT] = self._thrm.occupied_cooling_setpoint - if self._thrm.occupied_heating_setpoint is not None: - data[ATTR_OCCP_HEAT_SETPT] = self._thrm.occupied_heating_setpoint - if self._thrm.pi_heating_demand is not None: - data[ATTR_PI_HEATING_DEMAND] = self._thrm.pi_heating_demand - if self._thrm.pi_cooling_demand is not None: - data[ATTR_PI_COOLING_DEMAND] = self._thrm.pi_cooling_demand - - unoccupied_cooling_setpoint = self._thrm.unoccupied_cooling_setpoint - if unoccupied_cooling_setpoint is not None: - data[ATTR_UNOCCP_COOL_SETPT] = unoccupied_cooling_setpoint - - unoccupied_heating_setpoint = self._thrm.unoccupied_heating_setpoint - if unoccupied_heating_setpoint is not None: - data[ATTR_UNOCCP_HEAT_SETPT] = unoccupied_heating_setpoint - return data + return self.entity_data.entity.current_temperature @property def fan_mode(self) -> str | None: """Return current FAN mode.""" - 
if self._thrm.running_state is None: - return FAN_AUTO - - if self._thrm.running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return FAN_ON - return FAN_AUTO + return self.entity_data.entity.fan_mode @property def fan_modes(self) -> list[str] | None: """Return supported FAN modes.""" - if not self._fan: - return None - return [FAN_AUTO, FAN_ON] - - @property - def hvac_action(self) -> HVACAction | None: - """Return the current HVAC action.""" - if ( - self._thrm.pi_heating_demand is None - and self._thrm.pi_cooling_demand is None - ): - return self._rm_rs_action - return self._pi_demand_action - - @property - def _rm_rs_action(self) -> HVACAction | None: - """Return the current HVAC action based on running mode and running state.""" - - if (running_state := self._thrm.running_state) is None: - return None - if running_state & ( - T.RunningState.Heat_State_On | T.RunningState.Heat_2nd_Stage_On - ): - return HVACAction.HEATING - if running_state & ( - T.RunningState.Cool_State_On | T.RunningState.Cool_2nd_Stage_On - ): - return HVACAction.COOLING - if running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if running_state & T.RunningState.Idle: - return HVACAction.IDLE - if self.hvac_mode != HVACMode.OFF: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def _pi_demand_action(self) -> HVACAction | None: - """Return the current HVAC action based on pi_demands.""" - - heating_demand = self._thrm.pi_heating_demand - if heating_demand is not None and heating_demand > 0: - return HVACAction.HEATING - cooling_demand = self._thrm.pi_cooling_demand - if cooling_demand is not None and cooling_demand > 0: - return HVACAction.COOLING - - if self.hvac_mode != HVACMode.OFF: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def hvac_mode(self) -> HVACMode | None: - """Return HVAC operation mode.""" - return SYSTEM_MODE_2_HVAC.get(self._thrm.system_mode) - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return the list of available HVAC operation modes.""" - return SEQ_OF_OPERATION.get(self._thrm.ctrl_sequence_of_oper, [HVACMode.OFF]) + return self.entity_data.entity.fan_modes @property def preset_mode(self) -> str: """Return current preset mode.""" - return self._preset + return self.entity_data.entity.preset_mode @property def preset_modes(self) -> list[str] | None: """Return supported preset modes.""" - return self._presets + return self.entity_data.entity.preset_modes @property - def supported_features(self) -> ClimateEntityFeature: - """Return the list of supported features.""" - features = self._supported_flags - if HVACMode.HEAT_COOL in self.hvac_modes: - features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - if self._fan is not None: - self._supported_flags |= ClimateEntityFeature.FAN_MODE - return features - - @property - def target_temperature(self): + def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - temp = None - if self.hvac_mode == HVACMode.COOL: - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_cooling_setpoint - else: - temp = self._thrm.occupied_cooling_setpoint - elif self.hvac_mode == HVACMode.HEAT: - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_heating_setpoint - else: - temp = self._thrm.occupied_heating_setpoint - if temp is None: - return temp - return round(temp / ZCL_TEMP, 1) + return 
self.entity_data.entity.target_temperature @property - def target_temperature_high(self): + def target_temperature_high(self) -> float | None: """Return the upper bound temperature we try to reach.""" - if self.hvac_mode != HVACMode.HEAT_COOL: - return None - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_cooling_setpoint - else: - temp = self._thrm.occupied_cooling_setpoint - - if temp is None: - return temp - - return round(temp / ZCL_TEMP, 1) + return self.entity_data.entity.target_temperature_high @property - def target_temperature_low(self): + def target_temperature_low(self) -> float | None: """Return the lower bound temperature we try to reach.""" - if self.hvac_mode != HVACMode.HEAT_COOL: - return None - if self.preset_mode == PRESET_AWAY: - temp = self._thrm.unoccupied_heating_setpoint - else: - temp = self._thrm.occupied_heating_setpoint - - if temp is None: - return temp - return round(temp / ZCL_TEMP, 1) + return self.entity_data.entity.target_temperature_low @property def max_temp(self) -> float: """Return the maximum temperature.""" - temps = [] - if HVACMode.HEAT in self.hvac_modes: - temps.append(self._thrm.max_heat_setpoint_limit) - if HVACMode.COOL in self.hvac_modes: - temps.append(self._thrm.max_cool_setpoint_limit) - - if not temps: - return self.DEFAULT_MAX_TEMP - return round(max(temps) / ZCL_TEMP, 1) + return self.entity_data.entity.max_temp @property def min_temp(self) -> float: """Return the minimum temperature.""" - temps = [] - if HVACMode.HEAT in self.hvac_modes: - temps.append(self._thrm.min_heat_setpoint_limit) - if HVACMode.COOL in self.hvac_modes: - temps.append(self._thrm.min_cool_setpoint_limit) - - if not temps: - return self.DEFAULT_MIN_TEMP - return round(min(temps) / ZCL_TEMP, 1) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._thrm, SIGNAL_ATTR_UPDATED, self.async_attribute_updated - ) - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if ( - attr_name in (ATTR_OCCP_COOL_SETPT, ATTR_OCCP_HEAT_SETPT) - and self.preset_mode == PRESET_AWAY - ): - # occupancy attribute is an unreportable attribute, but if we get - # an attribute update for an "occupied" setpoint, there's a chance - # occupancy has changed - if await self._thrm.get_occupancy() is True: - self._preset = PRESET_NONE - - self.debug("Attribute '%s' = %s update", attr_name, value) - self.async_write_ha_state() - - async def async_set_fan_mode(self, fan_mode: str) -> None: - """Set fan mode.""" - if not self.fan_modes or fan_mode not in self.fan_modes: - self.warning("Unsupported '%s' fan mode", fan_mode) - return - - if fan_mode == FAN_ON: - mode = F.FanMode.On - else: - mode = F.FanMode.Auto - - await self._fan.async_set_speed(mode) - - async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set new target operation mode.""" - if hvac_mode not in self.hvac_modes: - self.warning( - "can't set '%s' mode. 
Supported modes are: %s", - hvac_mode, - self.hvac_modes, - ) - return - - if await self._thrm.async_set_operation_mode(HVAC_MODE_2_SYSTEM[hvac_mode]): - self.async_write_ha_state() - - async def async_set_preset_mode(self, preset_mode: str) -> None: - """Set new preset mode.""" - if not self.preset_modes or preset_mode not in self.preset_modes: - self.debug("Preset mode '%s' is not supported", preset_mode) - return - - if self.preset_mode not in ( - preset_mode, - PRESET_NONE, - ): - await self.async_preset_handler(self.preset_mode, enable=False) - - if preset_mode != PRESET_NONE: - await self.async_preset_handler(preset_mode, enable=True) - - self._preset = preset_mode - self.async_write_ha_state() - - async def async_set_temperature(self, **kwargs: Any) -> None: - """Set new target temperature.""" - low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW) - high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH) - temp = kwargs.get(ATTR_TEMPERATURE) - hvac_mode = kwargs.get(ATTR_HVAC_MODE) - - if hvac_mode is not None: - await self.async_set_hvac_mode(hvac_mode) - - is_away = self.preset_mode == PRESET_AWAY - - if self.hvac_mode == HVACMode.HEAT_COOL: - if low_temp is not None: - await self._thrm.async_set_heating_setpoint( - temperature=int(low_temp * ZCL_TEMP), - is_away=is_away, - ) - if high_temp is not None: - await self._thrm.async_set_cooling_setpoint( - temperature=int(high_temp * ZCL_TEMP), - is_away=is_away, - ) - elif temp is not None: - if self.hvac_mode == HVACMode.COOL: - await self._thrm.async_set_cooling_setpoint( - temperature=int(temp * ZCL_TEMP), - is_away=is_away, - ) - elif self.hvac_mode == HVACMode.HEAT: - await self._thrm.async_set_heating_setpoint( - temperature=int(temp * ZCL_TEMP), - is_away=is_away, - ) - else: - self.debug("Not setting temperature for '%s' mode", self.hvac_mode) - return - else: - self.debug("incorrect %s setting for '%s' mode", kwargs, self.hvac_mode) - return - - self.async_write_ha_state() - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode via handler.""" - - handler = getattr(self, f"async_preset_handler_{preset}") - await handler(enable) - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT, "sinope_manufacturer_specific"}, - manufacturers="Sinope Technologies", - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class SinopeTechnologiesThermostat(Thermostat): - """Sinope Technologies Thermostat.""" - - manufacturer = 0x119C - update_time_interval = timedelta(minutes=randint(45, 75)) - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [PRESET_AWAY, PRESET_NONE] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - self._manufacturer_ch = self.cluster_handlers["sinope_manufacturer_specific"] - - @property - def _rm_rs_action(self) -> HVACAction: - """Return the current HVAC action based on running mode and running state.""" - - running_mode = self._thrm.running_mode - if running_mode == T.SystemMode.Heat: - return HVACAction.HEATING - if running_mode == T.SystemMode.Cool: - return HVACAction.COOLING - - running_state = self._thrm.running_state - if running_state and running_state & ( - T.RunningState.Fan_State_On - | T.RunningState.Fan_2nd_Stage_On - | T.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if self.hvac_mode != HVACMode.OFF and running_mode == T.SystemMode.Off: - return HVACAction.IDLE - return 
HVACAction.OFF
+        return self.entity_data.entity.min_temp
 
     @callback
-    def _async_update_time(self, timestamp=None) -> None:
-        """Update thermostat's time display."""
-
-        secs_2k = (
-            dt_util.now().replace(tzinfo=None) - datetime(2000, 1, 1, 0, 0, 0, 0)
-        ).total_seconds()
-
-        self.debug("Updating time: %s", secs_2k)
-        self._manufacturer_ch.cluster.create_catching_task(
-            self._manufacturer_ch.write_attributes_safe(
-                {"secs_since_2k": secs_2k}, manufacturer=self.manufacturer
-            )
+    def _handle_entity_events(self, event: Any) -> None:
+        """Entity state changed."""
+        self._attr_hvac_mode = ZHA_TO_HA_HVAC_MODE.get(
+            self.entity_data.entity.hvac_mode
         )
-
-    async def async_added_to_hass(self) -> None:
-        """Run when about to be added to Hass."""
-        await super().async_added_to_hass()
-        self.async_on_remove(
-            async_track_time_interval(
-                self.hass, self._async_update_time, self.update_time_interval
-            )
+        self._attr_hvac_action = ZHA_TO_HA_HVAC_ACTION.get(
+            self.entity_data.entity.hvac_action
         )
-        self._async_update_time()
+        super()._handle_entity_events(event)
 
-    async def async_preset_handler_away(self, is_away: bool = False) -> None:
-        """Set occupancy."""
-        mfg_code = self._zha_device.manufacturer_code
-        await self._thrm.write_attributes_safe(
-            {"set_occupancy": 0 if is_away else 1}, manufacturer=mfg_code
+    @convert_zha_error_to_ha_error
+    async def async_set_fan_mode(self, fan_mode: str) -> None:
+        """Set fan mode."""
+        await self.entity_data.entity.async_set_fan_mode(fan_mode=fan_mode)
+        self.async_write_ha_state()
+
+    @convert_zha_error_to_ha_error
+    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
+        """Set new target operation mode."""
+        await self.entity_data.entity.async_set_hvac_mode(hvac_mode=hvac_mode)
+        self.async_write_ha_state()
+
+    @convert_zha_error_to_ha_error
+    async def async_set_preset_mode(self, preset_mode: str) -> None:
+        """Set new preset mode."""
+        await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode)
+        self.async_write_ha_state()
+
+    @convert_zha_error_to_ha_error
+    async def async_set_temperature(self, **kwargs: Any) -> None:
+        """Set new target temperature."""
+        await self.entity_data.entity.async_set_temperature(
+            target_temp_low=kwargs.get(ATTR_TARGET_TEMP_LOW),
+            target_temp_high=kwargs.get(ATTR_TARGET_TEMP_HIGH),
+            temperature=kwargs.get(ATTR_TEMPERATURE),
+            hvac_mode=kwargs.get(ATTR_HVAC_MODE),
         )
-
-
-@MULTI_MATCH(
-    cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT,
-    aux_cluster_handlers=CLUSTER_HANDLER_FAN,
-    manufacturers={"Zen Within", "LUX"},
-    stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT,
-)
-class ZenWithinThermostat(Thermostat):
-    """Zen Within Thermostat implementation."""
-
-
-@MULTI_MATCH(
-    cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT,
-    aux_cluster_handlers=CLUSTER_HANDLER_FAN,
-    manufacturers="Centralite",
-    models={"3157100", "3157100-E"},
-    stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT,
-)
-class CentralitePearl(ZenWithinThermostat):
-    """Centralite Pearl Thermostat implementation."""
-
-
-@STRICT_MATCH(
-    cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT,
-    manufacturers={
-        "_TZE200_ckud7u2l",
-        "_TZE200_ywdxldoj",
-        "_TZE200_cwnjrr72",
-        "_TZE200_2atgpdho",
-        "_TZE200_pvvbommb",
-        "_TZE200_4eeyebrt",
-        "_TZE200_cpmgn2cf",
-        "_TZE200_9sfg7gm0",
-        "_TZE200_8whxpsiw",
-        "_TYST11_ckud7u2l",
-        "_TYST11_ywdxldoj",
-        "_TYST11_cwnjrr72",
-        "_TYST11_2atgpdho",
-    },
-)
-class MoesThermostat(Thermostat):
-    """Moes Thermostat implementation."""
-
-    def __init__(self, unique_id,
zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - PRESET_AWAY, - PRESET_SCHEDULE, - PRESET_COMFORT, - PRESET_ECO, - PRESET_BOOST, - PRESET_COMPLEX, - ] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == "operation_preset": - if value == 0: - self._preset = PRESET_AWAY - if value == 1: - self._preset = PRESET_SCHEDULE - if value == 2: - self._preset = PRESET_NONE - if value == 3: - self._preset = PRESET_COMFORT - if value == 4: - self._preset = PRESET_ECO - if value == 5: - self._preset = PRESET_BOOST - if value == 6: - self._preset = PRESET_COMPLEX - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - return await self._thrm.write_attributes_safe( - {"operation_preset": 2}, manufacturer=mfg_code - ) - if preset == PRESET_AWAY: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_COMFORT: - return await self._thrm.write_attributes_safe( - {"operation_preset": 3}, manufacturer=mfg_code - ) - if preset == PRESET_ECO: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) - if preset == PRESET_BOOST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 5}, manufacturer=mfg_code - ) - if preset == PRESET_COMPLEX: - return await self._thrm.write_attributes_safe( - {"operation_preset": 6}, manufacturer=mfg_code - ) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers={ - "_TZE200_b6wax7g0", - }, -) -class BecaThermostat(Thermostat): - """Beca Thermostat implementation.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - PRESET_AWAY, - PRESET_SCHEDULE, - PRESET_ECO, - PRESET_BOOST, - PRESET_TEMP_MANUAL, - ] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == "operation_preset": - if value == 0: - self._preset = PRESET_AWAY - if value == 1: - self._preset = PRESET_SCHEDULE - if value == 2: - self._preset = PRESET_NONE - if value == 4: - self._preset = PRESET_ECO - if value == 5: - self._preset = PRESET_BOOST - if value == 7: - self._preset = PRESET_TEMP_MANUAL - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - 
return await self._thrm.write_attributes_safe( - {"operation_preset": 2}, manufacturer=mfg_code - ) - if preset == PRESET_AWAY: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_ECO: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) - if preset == PRESET_BOOST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 5}, manufacturer=mfg_code - ) - if preset == PRESET_TEMP_MANUAL: - return await self._thrm.write_attributes_safe( - {"operation_preset": 7}, manufacturer=mfg_code - ) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers="Stelpro", - models={"SORB"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -class StelproFanHeater(Thermostat): - """Stelpro Fan Heater implementation.""" - - @property - def hvac_modes(self) -> list[HVACMode]: - """Return only the heat mode, because the device can't be turned off.""" - return [HVACMode.HEAT] - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - manufacturers={ - "_TZE200_7yoranx2", - "_TZE200_e9ba97vf", # TV01-ZG - "_TZE200_hue3yfsn", # TV02-ZG - "_TZE200_husqqvux", # TSL-TRV-TV01ZG - "_TZE200_kds0pmmv", # MOES TRV TV02 - "_TZE200_kly8gjlz", # TV05-ZG - "_TZE200_lnbfnyxd", - "_TZE200_mudxchsu", - }, -) -class ZONNSMARTThermostat(Thermostat): - """ZONNSMART Thermostat implementation. - - Notice that this device uses two holiday presets (2: HolidayMode, - 3: HolidayModeTemp), but only one of them can be set. - """ - - PRESET_HOLIDAY = "holiday" - PRESET_FROST = "frost protect" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize ZHA Thermostat instance.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._presets = [ - PRESET_NONE, - self.PRESET_HOLIDAY, - PRESET_SCHEDULE, - self.PRESET_FROST, - ] - self._supported_flags |= ClimateEntityFeature.PRESET_MODE - - async def async_attribute_updated(self, attr_id, attr_name, value): - """Handle attribute update from device.""" - if attr_name == "operation_preset": - if value == 0: - self._preset = PRESET_SCHEDULE - if value == 1: - self._preset = PRESET_NONE - if value == 2: - self._preset = self.PRESET_HOLIDAY - if value == 3: - self._preset = self.PRESET_HOLIDAY - if value == 4: - self._preset = self.PRESET_FROST - await super().async_attribute_updated(attr_id, attr_name, value) - - async def async_preset_handler(self, preset: str, enable: bool = False) -> None: - """Set the preset mode.""" - mfg_code = self._zha_device.manufacturer_code - if not enable: - return await self._thrm.write_attributes_safe( - {"operation_preset": 1}, manufacturer=mfg_code - ) - if preset == PRESET_SCHEDULE: - return await self._thrm.write_attributes_safe( - {"operation_preset": 0}, manufacturer=mfg_code - ) - if preset == self.PRESET_HOLIDAY: - return await self._thrm.write_attributes_safe( - {"operation_preset": 3}, manufacturer=mfg_code - ) - if preset == self.PRESET_FROST: - return await self._thrm.write_attributes_safe( - {"operation_preset": 4}, manufacturer=mfg_code - ) + self.async_write_ha_state() diff --git a/homeassistant/components/zha/config_flow.py b/homeassistant/components/zha/config_flow.py index 9be27f7b37c..3a7b54652d9 100644 --- a/homeassistant/components/zha/config_flow.py +++ 
b/homeassistant/components/zha/config_flow.py
@@ -10,6 +10,7 @@ from typing import Any
 import serial.tools.list_ports
 from serial.tools.list_ports_common import ListPortInfo
 import voluptuous as vol
+from zha.application.const import RadioType
 import zigpy.backups
 from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH
 
@@ -35,13 +36,7 @@ from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
 from homeassistant.util import dt as dt_util
 
-from .core.const import (
-    CONF_BAUDRATE,
-    CONF_FLOW_CONTROL,
-    CONF_RADIO_TYPE,
-    DOMAIN,
-    RadioType,
-)
+from .const import CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN
 from .radio_manager import (
     DEVICE_SCHEMA,
     HARDWARE_DISCOVERY_SCHEMA,
@@ -146,12 +141,12 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
         self._title: str | None = None
 
     @property
-    def hass(self):
+    def hass(self) -> HomeAssistant:
         """Return hass."""
         return self._hass
 
     @hass.setter
-    def hass(self, hass):
+    def hass(self, hass: HomeAssistant) -> None:
         """Set hass."""
         self._hass = hass
         self._radio_mgr.hass = hass
diff --git a/homeassistant/components/zha/const.py b/homeassistant/components/zha/const.py
new file mode 100644
index 00000000000..3986a99cf3f
--- /dev/null
+++ b/homeassistant/components/zha/const.py
@@ -0,0 +1,76 @@
+"""Constants for the ZHA integration."""
+
+EZSP_OVERWRITE_EUI64 = (
+    "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it"
+)
+
+ATTR_ACTIVE_COORDINATOR = "active_coordinator"
+ATTR_ATTRIBUTES = "attributes"
+ATTR_AVAILABLE = "available"
+ATTR_DEVICE_TYPE = "device_type"
+ATTR_CLUSTER_NAME = "cluster_name"
+ATTR_ENDPOINT_NAMES = "endpoint_names"
+ATTR_IEEE = "ieee"
+ATTR_LAST_SEEN = "last_seen"
+ATTR_LQI = "lqi"
+ATTR_MANUFACTURER = "manufacturer"
+ATTR_MANUFACTURER_CODE = "manufacturer_code"
+ATTR_NEIGHBORS = "neighbors"
+ATTR_NWK = "nwk"
+ATTR_POWER_SOURCE = "power_source"
+ATTR_QUIRK_APPLIED = "quirk_applied"
+ATTR_QUIRK_CLASS = "quirk_class"
+ATTR_QUIRK_ID = "quirk_id"
+ATTR_ROUTES = "routes"
+ATTR_RSSI = "rssi"
+ATTR_SIGNATURE = "signature"
+ATTR_SUCCESS = "success"
+
+
+CONF_ALARM_MASTER_CODE = "alarm_master_code"
+CONF_ALARM_FAILED_TRIES = "alarm_failed_tries"
+CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code"
+
+CONF_RADIO_TYPE = "radio_type"
+CONF_USB_PATH = "usb_path"
+CONF_USE_THREAD = "use_thread"
+CONF_BAUDRATE = "baudrate"
+CONF_FLOW_CONTROL = "flow_control"
+
+CONF_ENABLE_QUIRKS = "enable_quirks"
+CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path"
+
+CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition"
+CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition"
+CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag"
+CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode"
+CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state"
+
+CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join"
+CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains"
+CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery"
+
+CONF_ZIGPY = "zigpy_config"
+CONF_DEVICE_CONFIG = "device_config"
+
+CUSTOM_CONFIGURATION = "custom_configuration"
+
+DATA_ZHA = "zha"
+DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache"
+
+DEFAULT_DATABASE_NAME = "zigbee.db"
+
+DEVICE_PAIRING_STATUS = "pairing_status"
+
+DOMAIN = "zha"
+
+GROUP_ID = "group_id"
+
+
+GROUP_IDS = "group_ids"
+GROUP_NAME = "group_name"
+
+MFG_CLUSTER_ID_START = 0xFC00
+
+ZHA_ALARM_OPTIONS = "zha_alarm_options"
+ZHA_OPTIONS = "zha_options"
diff --git
a/homeassistant/components/zha/core/__init__.py b/homeassistant/components/zha/core/__init__.py deleted file mode 100644 index 755eac3c4ce..00000000000 --- a/homeassistant/components/zha/core/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Core module for Zigbee Home Automation.""" - -from .device import ZHADevice -from .gateway import ZHAGateway - -__all__ = ["ZHADevice", "ZHAGateway"] diff --git a/homeassistant/components/zha/core/cluster_handlers/__init__.py b/homeassistant/components/zha/core/cluster_handlers/__init__.py deleted file mode 100644 index 8833d5c116f..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/__init__.py +++ /dev/null @@ -1,654 +0,0 @@ -"""Cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections.abc import Awaitable, Callable, Coroutine, Iterator -import contextlib -from enum import Enum -import functools -import logging -from typing import TYPE_CHECKING, Any, TypedDict - -import zigpy.exceptions -import zigpy.util -import zigpy.zcl -from zigpy.zcl.foundation import ( - CommandSchema, - ConfigureReportingResponseRecord, - Status, - ZCLAttributeDef, -) - -from homeassistant.const import ATTR_COMMAND -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.dispatcher import async_dispatcher_send - -from ..const import ( - ATTR_ARGS, - ATTR_ATTRIBUTE_ID, - ATTR_ATTRIBUTE_NAME, - ATTR_CLUSTER_ID, - ATTR_PARAMS, - ATTR_TYPE, - ATTR_UNIQUE_ID, - ATTR_VALUE, - CLUSTER_HANDLER_ZDO, - REPORT_CONFIG_ATTR_PER_REQ, - SIGNAL_ATTR_UPDATED, - ZHA_CLUSTER_HANDLER_MSG, - ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA, - ZHA_CLUSTER_HANDLER_READS_PER_REQ, -) -from ..helpers import LogMixin, safe_read - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -_LOGGER = logging.getLogger(__name__) -RETRYABLE_REQUEST_DECORATOR = zigpy.util.retryable_request(tries=3) -UNPROXIED_CLUSTER_METHODS = {"general_command"} - -type _FuncType[**_P] = Callable[_P, Awaitable[Any]] -type _ReturnFuncType[**_P] = Callable[_P, Coroutine[Any, Any, Any]] - - -@contextlib.contextmanager -def wrap_zigpy_exceptions() -> Iterator[None]: - """Wrap zigpy exceptions in `HomeAssistantError` exceptions.""" - try: - yield - except TimeoutError as exc: - raise HomeAssistantError( - "Failed to send request: device did not respond" - ) from exc - except zigpy.exceptions.ZigbeeException as exc: - message = "Failed to send request" - - if str(exc): - message = f"{message}: {exc}" - - raise HomeAssistantError(message) from exc - - -def retry_request[**_P](func: _FuncType[_P]) -> _ReturnFuncType[_P]: - """Send a request with retries and wrap expected zigpy exceptions.""" - - @functools.wraps(func) - async def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> Any: - with wrap_zigpy_exceptions(): - return await RETRYABLE_REQUEST_DECORATOR(func)(*args, **kwargs) - - return wrapper - - -class AttrReportConfig(TypedDict, total=True): - """Configuration to report for the attributes.""" - - # An attribute name - attr: str - # The config for the attribute reporting configuration consists of a tuple for - # (minimum_reported_time_interval_s, maximum_reported_time_interval_s, value_delta) - config: tuple[int, int, int | float] - - -def parse_and_log_command(cluster_handler, tsn, command_id, args): - """Parse and log a zigbee cluster command.""" - try: - name = cluster_handler.cluster.server_commands[command_id].name - except KeyError: - name = 
f"0x{command_id:02X}" - - cluster_handler.debug( - "received '%s' command with %s args on cluster_id '%s' tsn '%s'", - name, - args, - cluster_handler.cluster.cluster_id, - tsn, - ) - return name - - -class ClusterHandlerStatus(Enum): - """Status of a cluster handler.""" - - CREATED = 1 - CONFIGURED = 2 - INITIALIZED = 3 - - -class ClusterHandler(LogMixin): - """Base cluster handler for a Zigbee cluster.""" - - REPORT_CONFIG: tuple[AttrReportConfig, ...] = () - BIND: bool = True - - # Dict of attributes to read on cluster handler initialization. - # Dict keys -- attribute ID or names, with bool value indicating whether a cached - # attribute read is acceptable. - ZCL_INIT_ATTRS: dict[str, bool] = {} - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize ClusterHandler.""" - self._generic_id = f"cluster_handler_0x{cluster.cluster_id:04x}" - self._endpoint: Endpoint = endpoint - self._cluster = cluster - self._id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" - unique_id = endpoint.unique_id.replace("-", ":") - self._unique_id = f"{unique_id}:0x{cluster.cluster_id:04x}" - if not hasattr(self, "_value_attribute") and self.REPORT_CONFIG: - attr_def: ZCLAttributeDef = self.cluster.attributes_by_name[ - self.REPORT_CONFIG[0]["attr"] - ] - self.value_attribute = attr_def.id - self._status = ClusterHandlerStatus.CREATED - self._cluster.add_listener(self) - self.data_cache: dict[str, Enum] = {} - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return True - - @property - def id(self) -> str: - """Return cluster handler id unique for this device only.""" - return self._id - - @property - def generic_id(self): - """Return the generic id for this cluster handler.""" - return self._generic_id - - @property - def unique_id(self): - """Return the unique id for this cluster handler.""" - return self._unique_id - - @property - def cluster(self): - """Return the zigpy cluster for this cluster handler.""" - return self._cluster - - @property - def name(self) -> str: - """Return friendly name.""" - return self.cluster.ep_attribute or self._generic_id - - @property - def status(self): - """Return the status of the cluster handler.""" - return self._status - - def __hash__(self) -> int: - """Make this a hashable.""" - return hash(self._unique_id) - - @callback - def async_send_signal(self, signal: str, *args: Any) -> None: - """Send a signal through hass dispatcher.""" - self._endpoint.async_send_signal(signal, *args) - - async def bind(self): - """Bind a zigbee cluster. - - This also swallows ZigbeeException exceptions that are thrown when - devices are unreachable. 
- """ - try: - res = await self.cluster.bind() - self.debug("bound '%s' cluster: %s", self.cluster.ep_attribute, res[0]) - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "success": res[0] == 0, - }, - }, - ) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to bind '%s' cluster: %s", - self.cluster.ep_attribute, - str(ex), - exc_info=ex, - ) - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "success": False, - }, - }, - ) - - async def configure_reporting(self) -> None: - """Configure attribute reporting for a cluster. - - This also swallows ZigbeeException exceptions that are thrown when - devices are unreachable. - """ - event_data = {} - kwargs = {} - if ( - self.cluster.cluster_id >= 0xFC00 - and self._endpoint.device.manufacturer_code - ): - kwargs["manufacturer"] = self._endpoint.device.manufacturer_code - - for attr_report in self.REPORT_CONFIG: - attr, config = attr_report["attr"], attr_report["config"] - - try: - attr_name = self.cluster.find_attribute(attr).name - except KeyError: - attr_name = attr - - event_data[attr_name] = { - "min": config[0], - "max": config[1], - "id": attr, - "name": attr_name, - "change": config[2], - "status": None, - } - - to_configure = [*self.REPORT_CONFIG] - chunk, rest = ( - to_configure[:REPORT_CONFIG_ATTR_PER_REQ], - to_configure[REPORT_CONFIG_ATTR_PER_REQ:], - ) - while chunk: - reports = {rec["attr"]: rec["config"] for rec in chunk} - try: - res = await self.cluster.configure_reporting_multiple(reports, **kwargs) - self._configure_reporting_status(reports, res[0], event_data) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "failed to set reporting on '%s' cluster for: %s", - self.cluster.ep_attribute, - str(ex), - ) - break - chunk, rest = ( - rest[:REPORT_CONFIG_ATTR_PER_REQ], - rest[REPORT_CONFIG_ATTR_PER_REQ:], - ) - - async_dispatcher_send( - self._endpoint.device.hass, - ZHA_CLUSTER_HANDLER_MSG, - { - ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, - ZHA_CLUSTER_HANDLER_MSG_DATA: { - "cluster_name": self.cluster.name, - "cluster_id": self.cluster.cluster_id, - "attributes": event_data, - }, - }, - ) - - def _configure_reporting_status( - self, - attrs: dict[str, tuple[int, int, float | int]], - res: list | tuple, - event_data: dict[str, dict[str, Any]], - ) -> None: - """Parse configure reporting result.""" - if isinstance(res, (Exception, ConfigureReportingResponseRecord)): - # assume default response - self.debug( - "attr reporting for '%s' on '%s': %s", - attrs, - self.name, - res, - ) - for attr in attrs: - event_data[attr]["status"] = Status.FAILURE.name - return - if res[0].status == Status.SUCCESS and len(res) == 1: - self.debug( - "Successfully configured reporting for '%s' on '%s' cluster: %s", - attrs, - self.name, - res, - ) - # 2.5.8.1.3 Status Field - # The status field specifies the status of the Configure Reporting operation attempted on this attribute, as detailed in 2.5.7.3. - # Note that attribute status records are not included for successfully configured attributes, in order to save bandwidth. 
- # In the case of successful configuration of all attributes, only a single attribute status record SHALL be included in the command, - # with the status field set to SUCCESS and the direction and attribute identifier fields omitted. - for attr in attrs: - event_data[attr]["status"] = Status.SUCCESS.name - return - - for record in res: - event_data[self.cluster.find_attribute(record.attrid).name]["status"] = ( - record.status.name - ) - failed = [ - self.cluster.find_attribute(record.attrid).name - for record in res - if record.status != Status.SUCCESS - ] - self.debug( - "Failed to configure reporting for '%s' on '%s' cluster: %s", - failed, - self.name, - res, - ) - success = set(attrs) - set(failed) - self.debug( - "Successfully configured reporting for '%s' on '%s' cluster", - set(attrs) - set(failed), - self.name, - ) - for attr in success: - event_data[attr]["status"] = Status.SUCCESS.name - - async def async_configure(self) -> None: - """Set cluster binding and attribute reporting.""" - if not self._endpoint.device.skip_configuration: - if self.BIND: - self.debug("Performing cluster binding") - await self.bind() - if self.cluster.is_server: - self.debug("Configuring cluster attribute reporting") - await self.configure_reporting() - ch_specific_cfg = getattr( - self, "async_configure_cluster_handler_specific", None - ) - if ch_specific_cfg: - self.debug("Performing cluster handler specific configuration") - await ch_specific_cfg() - self.debug("finished cluster handler configuration") - else: - self.debug("skipping cluster handler configuration") - self._status = ClusterHandlerStatus.CONFIGURED - - async def async_initialize(self, from_cache: bool) -> None: - """Initialize cluster handler.""" - if not from_cache and self._endpoint.device.skip_configuration: - self.debug("Skipping cluster handler initialization") - self._status = ClusterHandlerStatus.INITIALIZED - return - - self.debug("initializing cluster handler: from_cache: %s", from_cache) - cached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if cached] - uncached = [a for a, cached in self.ZCL_INIT_ATTRS.items() if not cached] - uncached.extend([cfg["attr"] for cfg in self.REPORT_CONFIG]) - - if cached: - self.debug("initializing cached cluster handler attributes: %s", cached) - await self._get_attributes( - True, cached, from_cache=True, only_cache=from_cache - ) - if uncached: - self.debug( - "initializing uncached cluster handler attributes: %s - from cache[%s]", - uncached, - from_cache, - ) - await self._get_attributes( - True, uncached, from_cache=from_cache, only_cache=from_cache - ) - - ch_specific_init = getattr( - self, "async_initialize_cluster_handler_specific", None - ) - if ch_specific_init: - self.debug( - "Performing cluster handler specific initialization: %s", uncached - ) - await ch_specific_init(from_cache=from_cache) - - self.debug("finished cluster handler initialization") - self._status = ClusterHandlerStatus.INITIALIZED - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "cluster_handler[%s] attribute_updated - cluster[%s] attr[%s] value[%s]", - self.name, - self.cluster.name, - attr_name, - value, - ) - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - - @callback - def 
zdo_command(self, *args, **kwargs): - """Handle ZDO commands on this cluster.""" - - @callback - def zha_send_event(self, command: str, arg: list | dict | CommandSchema) -> None: - """Relay events to hass.""" - - args: list | dict - if isinstance(arg, CommandSchema): - args = [a for a in arg if a is not None] - params = arg.as_dict() - elif isinstance(arg, (list, dict)): - # Quirks can directly send lists and dicts to ZHA this way - args = arg - params = {} - else: - raise TypeError(f"Unexpected zha_send_event {command!r} argument: {arg!r}") - - self._endpoint.send_event( - { - ATTR_UNIQUE_ID: self.unique_id, - ATTR_CLUSTER_ID: self.cluster.cluster_id, - ATTR_COMMAND: command, - # Maintain backwards compatibility with the old zigpy response format - ATTR_ARGS: args, - ATTR_PARAMS: params, - } - ) - - async def async_update(self): - """Retrieve latest state from cluster.""" - - def _get_attribute_name(self, attrid: int) -> str | int: - if attrid not in self.cluster.attributes: - return attrid - - return self.cluster.attributes[attrid].name - - async def get_attribute_value(self, attribute, from_cache=True): - """Get the value for an attribute.""" - manufacturer = None - manufacturer_code = self._endpoint.device.manufacturer_code - if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: - manufacturer = manufacturer_code - result = await safe_read( - self._cluster, - [attribute], - allow_cache=from_cache, - only_cache=from_cache, - manufacturer=manufacturer, - ) - return result.get(attribute) - - async def _get_attributes( - self, - raise_exceptions: bool, - attributes: list[str], - from_cache: bool = True, - only_cache: bool = True, - ) -> dict[int | str, Any]: - """Get the values for a list of attributes.""" - manufacturer = None - manufacturer_code = self._endpoint.device.manufacturer_code - if self.cluster.cluster_id >= 0xFC00 and manufacturer_code: - manufacturer = manufacturer_code - chunk = attributes[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] - rest = attributes[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] - result = {} - while chunk: - try: - self.debug("Reading attributes in chunks: %s", chunk) - read, _ = await self.cluster.read_attributes( - chunk, - allow_cache=from_cache, - only_cache=only_cache, - manufacturer=manufacturer, - ) - result.update(read) - except (TimeoutError, zigpy.exceptions.ZigbeeException) as ex: - self.debug( - "failed to get attributes '%s' on '%s' cluster: %s", - chunk, - self.cluster.ep_attribute, - str(ex), - ) - if raise_exceptions: - raise - chunk = rest[:ZHA_CLUSTER_HANDLER_READS_PER_REQ] - rest = rest[ZHA_CLUSTER_HANDLER_READS_PER_REQ:] - return result - - get_attributes = functools.partialmethod(_get_attributes, False) - - async def write_attributes_safe( - self, attributes: dict[str, Any], manufacturer: int | None = None - ) -> None: - """Wrap `write_attributes` to throw an exception on attribute write failure.""" - - res = await self.write_attributes(attributes, manufacturer=manufacturer) - - for record in res[0]: - if record.status != Status.SUCCESS: - try: - name = self.cluster.attributes[record.attrid].name - value = attributes.get(name, "unknown") - except KeyError: - name = f"0x{record.attrid:04x}" - value = "unknown" - - raise HomeAssistantError( - f"Failed to write attribute {name}={value}: {record.status}", - ) - - def log(self, level, msg, *args, **kwargs): - """Log a message.""" - msg = f"[%s:%s]: {msg}" - args = (self._endpoint.device.nwk, self._id, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - def __getattr__(self, name): - """Get attribute 
or a decorated cluster command.""" - if ( - hasattr(self._cluster, name) - and callable(getattr(self._cluster, name)) - and name not in UNPROXIED_CLUSTER_METHODS - ): - command = getattr(self._cluster, name) - wrapped_command = retry_request(command) - wrapped_command.__name__ = name - - return wrapped_command - return self.__getattribute__(name) - - -class ZDOClusterHandler(LogMixin): - """Cluster handler for ZDO events.""" - - def __init__(self, device) -> None: - """Initialize ZDOClusterHandler.""" - self.name = CLUSTER_HANDLER_ZDO - self._cluster = device.device.endpoints[0] - self._zha_device = device - self._status = ClusterHandlerStatus.CREATED - self._unique_id = f"{device.ieee!s}:{device.name}_ZDO" - self._cluster.add_listener(self) - - @property - def unique_id(self): - """Return the unique id for this cluster handler.""" - return self._unique_id - - @property - def cluster(self): - """Return the zigpy cluster for this cluster handler.""" - return self._cluster - - @property - def status(self): - """Return the status of the cluster handler.""" - return self._status - - @callback - def device_announce(self, zigpy_device): - """Device announce handler.""" - - @callback - def permit_duration(self, duration): - """Permit handler.""" - - async def async_initialize(self, from_cache): - """Initialize cluster handler.""" - self._status = ClusterHandlerStatus.INITIALIZED - - async def async_configure(self): - """Configure cluster handler.""" - self._status = ClusterHandlerStatus.CONFIGURED - - def log(self, level, msg, *args, **kwargs): - """Log a message.""" - msg = f"[%s:ZDO](%s): {msg}" - args = (self._zha_device.nwk, self._zha_device.model, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - -class ClientClusterHandler(ClusterHandler): - """ClusterHandler for Zigbee client (output) clusters.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: - """Handle an attribute updated on this cluster.""" - super().attribute_updated(attrid, value, timestamp) - - try: - attr_name = self._cluster.attributes[attrid].name - except KeyError: - attr_name = "Unknown" - - self.zha_send_event( - SIGNAL_ATTR_UPDATED, - { - ATTR_ATTRIBUTE_ID: attrid, - ATTR_ATTRIBUTE_NAME: attr_name, - ATTR_VALUE: value, - }, - ) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - if ( - self._cluster.server_commands is not None - and self._cluster.server_commands.get(command_id) is not None - ): - self.zha_send_event(self._cluster.server_commands[command_id].name, args) diff --git a/homeassistant/components/zha/core/cluster_handlers/closures.py b/homeassistant/components/zha/core/cluster_handlers/closures.py deleted file mode 100644 index e96d6492beb..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/closures.py +++ /dev/null @@ -1,271 +0,0 @@ -"""Closures cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from typing import Any - -import zigpy.types as t -from zigpy.zcl.clusters.closures import ConfigStatus, DoorLock, Shade, WindowCovering - -from homeassistant.core import callback - -from .. import registries -from ..const import REPORT_CONFIG_IMMEDIATE, SIGNAL_ATTR_UPDATED -from . 
import AttrReportConfig, ClientClusterHandler, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DoorLock.cluster_id) -class DoorLockClusterHandler(ClusterHandler): - """Door lock cluster handler.""" - - _value_attribute = 0 - REPORT_CONFIG = ( - AttrReportConfig( - attr=DoorLock.AttributeDefs.lock_state.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - async def async_update(self): - """Retrieve latest state.""" - result = await self.get_attribute_value( - DoorLock.AttributeDefs.lock_state.name, from_cache=True - ) - if result is not None: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - DoorLock.AttributeDefs.lock_state.id, - DoorLock.AttributeDefs.lock_state.name, - result, - ) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - - if ( - self._cluster.client_commands is None - or self._cluster.client_commands.get(command_id) is None - ): - return - - command_name = self._cluster.client_commands[command_id].name - - if command_name == DoorLock.ClientCommandDefs.operation_event_notification.name: - self.zha_send_event( - command_name, - { - "source": args[0].name, - "operation": args[1].name, - "code_slot": (args[2] + 1), # start code slots at 1 - }, - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from lock cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attrid == self._value_attribute: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - async def async_set_user_code(self, code_slot: int, user_code: str) -> None: - """Set the user code for the code slot.""" - - await self.set_pin_code( - code_slot - 1, # start code slots at 1, Zigbee internals use 0 - DoorLock.UserStatus.Enabled, - DoorLock.UserType.Unrestricted, - user_code, - ) - - async def async_enable_user_code(self, code_slot: int) -> None: - """Enable the code slot.""" - - await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Enabled) - - async def async_disable_user_code(self, code_slot: int) -> None: - """Disable the code slot.""" - - await self.set_user_status(code_slot - 1, DoorLock.UserStatus.Disabled) - - async def async_get_user_code(self, code_slot: int) -> int: - """Get the user code from the code slot.""" - - return await self.get_pin_code(code_slot - 1) - - async def async_clear_user_code(self, code_slot: int) -> None: - """Clear the code slot.""" - - await self.clear_pin_code(code_slot - 1) - - async def async_clear_all_user_codes(self) -> None: - """Clear all code slots.""" - - await self.clear_all_pin_codes() - - async def async_set_user_type(self, code_slot: int, user_type: str) -> None: - """Set user type.""" - - await self.set_user_type(code_slot - 1, user_type) - - async def async_get_user_type(self, code_slot: int) -> str: - """Get user type.""" - - return await self.get_user_type(code_slot - 1) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Shade.cluster_id) -class ShadeClusterHandler(ClusterHandler): - """Shade cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) -class WindowCoveringClientClusterHandler(ClientClusterHandler): - """Window client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(WindowCovering.cluster_id) 
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(WindowCovering.cluster_id) -class WindowCoveringClusterHandler(ClusterHandler): - """Window cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=WindowCovering.AttributeDefs.current_position_lift_percentage.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - AttrReportConfig( - attr=WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - ZCL_INIT_ATTRS = { - WindowCovering.AttributeDefs.window_covering_type.name: True, - WindowCovering.AttributeDefs.window_covering_mode.name: True, - WindowCovering.AttributeDefs.config_status.name: True, - WindowCovering.AttributeDefs.installed_closed_limit_lift.name: True, - WindowCovering.AttributeDefs.installed_closed_limit_tilt.name: True, - WindowCovering.AttributeDefs.installed_open_limit_lift.name: True, - WindowCovering.AttributeDefs.installed_open_limit_tilt.name: True, - } - - async def async_update(self): - """Retrieve latest state.""" - results = await self.get_attributes( - [ - WindowCovering.AttributeDefs.current_position_lift_percentage.name, - WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - ], - from_cache=False, - only_cache=False, - ) - self.debug( - "read current_position_lift_percentage and current_position_tilt_percentage - results: %s", - results, - ) - if ( - results - and results.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ) - is not None - ): - # the 100 - value is because we need to invert the value before giving it to the entity - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - WindowCovering.AttributeDefs.current_position_lift_percentage.id, - WindowCovering.AttributeDefs.current_position_lift_percentage.name, - 100 - - results.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ), - ) - if ( - results - and results.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ) - is not None - ): - # the 100 - value is because we need to invert the value before giving it to the entity - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - WindowCovering.AttributeDefs.current_position_tilt_percentage.id, - WindowCovering.AttributeDefs.current_position_tilt_percentage.name, - 100 - - results.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ), - ) - - @property - def inverted(self): - """Return true if the window covering is inverted.""" - config_status = self.cluster.get( - WindowCovering.AttributeDefs.config_status.name - ) - return ( - config_status is not None - and ConfigStatus.Open_up_commands_reversed in ConfigStatus(config_status) - ) - - @property - def current_position_lift_percentage(self) -> t.uint16_t | None: - """Return the current lift percentage of the window covering.""" - lift_percentage = self.cluster.get( - WindowCovering.AttributeDefs.current_position_lift_percentage.name - ) - if lift_percentage is not None: - # the 100 - value is because we need to invert the value before giving it to the entity - lift_percentage = 100 - lift_percentage - return lift_percentage - - @property - def current_position_tilt_percentage(self) -> t.uint16_t | None: - """Return the current tilt percentage of the window covering.""" - tilt_percentage = self.cluster.get( - WindowCovering.AttributeDefs.current_position_tilt_percentage.name - ) - if tilt_percentage is not None: - # the 100 - value is because we need to invert the value before giving it to the 
entity - tilt_percentage = 100 - tilt_percentage - return tilt_percentage - - @property - def installed_open_limit_lift(self) -> t.uint16_t | None: - """Return the installed open lift limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_open_limit_lift.name - ) - - @property - def installed_closed_limit_lift(self) -> t.uint16_t | None: - """Return the installed closed lift limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_closed_limit_lift.name - ) - - @property - def installed_open_limit_tilt(self) -> t.uint16_t | None: - """Return the installed open tilt limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_open_limit_tilt.name - ) - - @property - def installed_closed_limit_tilt(self) -> t.uint16_t | None: - """Return the installed closed tilt limit of the window covering.""" - return self.cluster.get( - WindowCovering.AttributeDefs.installed_closed_limit_tilt.name - ) - - @property - def window_covering_type(self) -> WindowCovering.WindowCoveringType | None: - """Return the window covering type.""" - return self.cluster.get(WindowCovering.AttributeDefs.window_covering_type.name) diff --git a/homeassistant/components/zha/core/cluster_handlers/general.py b/homeassistant/components/zha/core/cluster_handlers/general.py deleted file mode 100644 index 438fc6b1723..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/general.py +++ /dev/null @@ -1,690 +0,0 @@ -"""General cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections.abc import Coroutine -from typing import TYPE_CHECKING, Any - -from zhaquirks.quirk_ids import TUYA_PLUG_ONOFF -import zigpy.exceptions -import zigpy.types as t -import zigpy.zcl -from zigpy.zcl.clusters.general import ( - Alarms, - AnalogInput, - AnalogOutput, - AnalogValue, - ApplianceControl, - Basic, - BinaryInput, - BinaryOutput, - BinaryValue, - Commissioning, - DeviceTemperature, - GreenPowerProxy, - Groups, - Identify, - LevelControl, - MultistateInput, - MultistateOutput, - MultistateValue, - OnOff, - OnOffConfiguration, - Ota, - Partition, - PollControl, - PowerConfiguration, - PowerProfile, - RSSILocation, - Scenes, - Time, -) -from zigpy.zcl.foundation import Status - -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.event import async_call_later - -from .. import registries -from ..const import ( - REPORT_CONFIG_ASAP, - REPORT_CONFIG_BATTERY_SAVE, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - SIGNAL_ATTR_UPDATED, - SIGNAL_MOVE_LEVEL, - SIGNAL_SET_LEVEL, - SIGNAL_UPDATE_DEVICE, -) -from . 
import ( - AttrReportConfig, - ClientClusterHandler, - ClusterHandler, - parse_and_log_command, -) -from .helpers import is_hue_motion_sensor - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Alarms.cluster_id) -class AlarmsClusterHandler(ClusterHandler): - """Alarms cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInput.cluster_id) -class AnalogInputClusterHandler(ClusterHandler): - """Analog Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogInput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.BINDABLE_CLUSTERS.register(AnalogOutput.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutput.cluster_id) -class AnalogOutputClusterHandler(ClusterHandler): - """Analog Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - ZCL_INIT_ATTRS = { - AnalogOutput.AttributeDefs.min_present_value.name: True, - AnalogOutput.AttributeDefs.max_present_value.name: True, - AnalogOutput.AttributeDefs.resolution.name: True, - AnalogOutput.AttributeDefs.relinquish_default.name: True, - AnalogOutput.AttributeDefs.description.name: True, - AnalogOutput.AttributeDefs.engineering_units.name: True, - AnalogOutput.AttributeDefs.application_type.name: True, - } - - @property - def present_value(self) -> float | None: - """Return cached value of present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.present_value.name) - - @property - def min_present_value(self) -> float | None: - """Return cached value of min_present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.min_present_value.name) - - @property - def max_present_value(self) -> float | None: - """Return cached value of max_present_value.""" - return self.cluster.get(AnalogOutput.AttributeDefs.max_present_value.name) - - @property - def resolution(self) -> float | None: - """Return cached value of resolution.""" - return self.cluster.get(AnalogOutput.AttributeDefs.resolution.name) - - @property - def relinquish_default(self) -> float | None: - """Return cached value of relinquish_default.""" - return self.cluster.get(AnalogOutput.AttributeDefs.relinquish_default.name) - - @property - def description(self) -> str | None: - """Return cached value of description.""" - return self.cluster.get(AnalogOutput.AttributeDefs.description.name) - - @property - def engineering_units(self) -> int | None: - """Return cached value of engineering_units.""" - return self.cluster.get(AnalogOutput.AttributeDefs.engineering_units.name) - - @property - def application_type(self) -> int | None: - """Return cached value of application_type.""" - return self.cluster.get(AnalogOutput.AttributeDefs.application_type.name) - - async def async_set_present_value(self, value: float) -> None: - """Update present_value.""" - await self.write_attributes_safe( - {AnalogOutput.AttributeDefs.present_value.name: value} - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValue.cluster_id) -class AnalogValueClusterHandler(ClusterHandler): - """Analog Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=AnalogValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceControl.cluster_id) -class ApplianceControlClusterHandler(ClusterHandler): - 
"""Appliance Control cluster handler.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(Basic.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Basic.cluster_id) -class BasicClusterHandler(ClusterHandler): - """Cluster handler to interact with the basic cluster.""" - - UNKNOWN = 0 - BATTERY = 3 - BIND: bool = False - - POWER_SOURCES = { - UNKNOWN: "Unknown", - 1: "Mains (single phase)", - 2: "Mains (3 phase)", - BATTERY: "Battery", - 4: "DC source", - 5: "Emergency mains constantly powered", - 6: "Emergency mains and transfer switch", - } - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Basic cluster handler.""" - super().__init__(cluster, endpoint) - if is_hue_motion_sensor(self) and self.cluster.endpoint.endpoint_id == 2: - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["trigger_indicator"] = True - elif ( - self.cluster.endpoint.manufacturer == "TexasInstruments" - and self.cluster.endpoint.model == "ti.router" - ): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["transmit_power"] = True - elif self.cluster.endpoint.model == "lumi.curtain.agl001": - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["power_source"] = True - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInput.cluster_id) -class BinaryInputClusterHandler(ClusterHandler): - """Binary Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryInput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutput.cluster_id) -class BinaryOutputClusterHandler(ClusterHandler): - """Binary Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValue.cluster_id) -class BinaryValueClusterHandler(ClusterHandler): - """Binary Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=BinaryValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Commissioning.cluster_id) -class CommissioningClusterHandler(ClusterHandler): - """Commissioning cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceTemperature.cluster_id) -class DeviceTemperatureClusterHandler(ClusterHandler): - """Device Temperature cluster handler.""" - - REPORT_CONFIG = ( - { - "attr": DeviceTemperature.AttributeDefs.current_temperature.name, - "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - }, - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GreenPowerProxy.cluster_id) -class GreenPowerProxyClusterHandler(ClusterHandler): - """Green Power Proxy cluster handler.""" - - BIND: bool = False - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Groups.cluster_id) -class GroupsClusterHandler(ClusterHandler): - """Groups cluster handler.""" - - BIND: bool = False - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Identify.cluster_id) -class IdentifyClusterHandler(ClusterHandler): - """Identify cluster handler.""" - - BIND: bool = False - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd == Identify.ServerCommandDefs.trigger_effect.name: - 
self.async_send_signal(f"{self.unique_id}_{cmd}", args[0]) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) -class LevelControlClientClusterHandler(ClientClusterHandler): - """LevelControl client cluster.""" - - -@registries.BINDABLE_CLUSTERS.register(LevelControl.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LevelControl.cluster_id) -class LevelControlClusterHandler(ClusterHandler): - """Cluster handler for the LevelControl Zigbee cluster.""" - - CURRENT_LEVEL = 0 - REPORT_CONFIG = ( - AttrReportConfig( - attr=LevelControl.AttributeDefs.current_level.name, - config=REPORT_CONFIG_ASAP, - ), - ) - ZCL_INIT_ATTRS = { - LevelControl.AttributeDefs.on_off_transition_time.name: True, - LevelControl.AttributeDefs.on_level.name: True, - LevelControl.AttributeDefs.on_transition_time.name: True, - LevelControl.AttributeDefs.off_transition_time.name: True, - LevelControl.AttributeDefs.default_move_rate.name: True, - LevelControl.AttributeDefs.start_up_current_level.name: True, - } - - @property - def current_level(self) -> int | None: - """Return cached value of the current_level attribute.""" - return self.cluster.get(LevelControl.AttributeDefs.current_level.name) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd in ( - LevelControl.ServerCommandDefs.move_to_level.name, - LevelControl.ServerCommandDefs.move_to_level_with_on_off.name, - ): - self.dispatch_level_change(SIGNAL_SET_LEVEL, args[0]) - elif cmd in ( - LevelControl.ServerCommandDefs.move.name, - LevelControl.ServerCommandDefs.move_with_on_off.name, - ): - # We should dim slowly -- for now, just step once - rate = args[1] - if args[0] == 0xFF: - rate = 10 # Should read default move rate - self.dispatch_level_change(SIGNAL_MOVE_LEVEL, -rate if args[0] else rate) - elif cmd in ( - LevelControl.ServerCommandDefs.step.name, - LevelControl.ServerCommandDefs.step_with_on_off.name, - ): - # Step (technically may change on/off) - self.dispatch_level_change( - SIGNAL_MOVE_LEVEL, -args[1] if args[0] else args[1] - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - self.debug("received attribute: %s update with value: %s", attrid, value) - if attrid == self.CURRENT_LEVEL: - self.dispatch_level_change(SIGNAL_SET_LEVEL, value) - - def dispatch_level_change(self, command, level): - """Dispatch level change.""" - self.async_send_signal(f"{self.unique_id}_{command}", level) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInput.cluster_id) -class MultistateInputClusterHandler(ClusterHandler): - """Multistate Input cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=MultistateInput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutput.cluster_id) -class MultistateOutputClusterHandler(ClusterHandler): - """Multistate Output cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=MultistateOutput.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValue.cluster_id) -class MultistateValueClusterHandler(ClusterHandler): - """Multistate Value cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - 
attr=MultistateValue.AttributeDefs.present_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) -class OnOffClientClusterHandler(ClientClusterHandler): - """OnOff client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(OnOff.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOff.cluster_id) -class OnOffClusterHandler(ClusterHandler): - """Cluster handler for the OnOff Zigbee cluster.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=OnOff.AttributeDefs.on_off.name, config=REPORT_CONFIG_IMMEDIATE - ), - ) - ZCL_INIT_ATTRS = { - OnOff.AttributeDefs.start_up_on_off.name: True, - } - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize OnOffClusterHandler.""" - super().__init__(cluster, endpoint) - self._off_listener = None - - if endpoint.device.quirk_id == TUYA_PLUG_ONOFF: - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["backlight_mode"] = True - self.ZCL_INIT_ATTRS["power_on_state"] = True - self.ZCL_INIT_ATTRS["child_lock"] = True - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return not ( - cluster.endpoint.device.manufacturer == "Konke" - and cluster.endpoint.device.model - in ("3AFE280100510001", "3AFE170100510001") - ) - - @property - def on_off(self) -> bool | None: - """Return cached value of on/off attribute.""" - return self.cluster.get(OnOff.AttributeDefs.on_off.name) - - async def turn_on(self) -> None: - """Turn the on off cluster on.""" - result = await self.on() - if result[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to turn on: {result[1]}") - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) - - async def turn_off(self) -> None: - """Turn the on off cluster off.""" - result = await self.off() - if result[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to turn off: {result[1]}") - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - cmd = parse_and_log_command(self, tsn, command_id, args) - - if cmd in ( - OnOff.ServerCommandDefs.off.name, - OnOff.ServerCommandDefs.off_with_effect.name, - ): - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) - elif cmd in ( - OnOff.ServerCommandDefs.on.name, - OnOff.ServerCommandDefs.on_with_recall_global_scene.name, - ): - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.true) - elif cmd == OnOff.ServerCommandDefs.on_with_timed_off.name: - should_accept = args[0] - on_time = args[1] - # 0 is always accept 1 is only accept when already on - if should_accept == 0 or (should_accept == 1 and bool(self.on_off)): - if self._off_listener is not None: - self._off_listener() - self._off_listener = None - self.cluster.update_attribute( - OnOff.AttributeDefs.on_off.id, t.Bool.true - ) - if on_time > 0: - self._off_listener = async_call_later( - self._endpoint.device.hass, - (on_time / 10), # value is in 10ths of a second - self.set_to_off, - ) - elif cmd == "toggle": - self.cluster.update_attribute( - OnOff.AttributeDefs.on_off.id, not bool(self.on_off) - ) - - @callback - def set_to_off(self, *_): - """Set the state to off.""" - self._off_listener = None - self.cluster.update_attribute(OnOff.AttributeDefs.on_off.id, t.Bool.false) - - 
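# Editor's note (illustrative sketch, not part of the deleted file): the
# on_with_timed_off branch above receives on_time in tenths of a second,
# which is why the delayed turn-off divides by ten before scheduling
# set_to_off. Assuming a hypothetical payload of on_time=300, the handler
# would schedule the off callback 30 seconds later:
#
#     delay_seconds = on_time / 10  # 300 -> 30.0 seconds
#     self._off_listener = async_call_later(
#         self._endpoint.device.hass, delay_seconds, self.set_to_off
#     )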
@callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - if attrid == OnOff.AttributeDefs.on_off.id: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - OnOff.AttributeDefs.on_off.name, - value, - ) - - async def async_update(self): - """Initialize cluster handler.""" - if self.cluster.is_client: - return - from_cache = not self._endpoint.device.is_mains_powered - self.debug("attempting to update onoff state - from cache: %s", from_cache) - await self.get_attribute_value( - OnOff.AttributeDefs.on_off.id, from_cache=from_cache - ) - await super().async_update() - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OnOffConfiguration.cluster_id) -class OnOffConfigurationClusterHandler(ClusterHandler): - """OnOff Configuration cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) -class OtaClusterHandler(ClusterHandler): - """OTA cluster handler.""" - - BIND: bool = False - - # Some devices have this cluster in the wrong collection (e.g. Third Reality) - ZCL_INIT_ATTRS = { - Ota.AttributeDefs.current_file_version.name: True, - } - - @property - def current_file_version(self) -> int | None: - """Return cached value of current_file_version attribute.""" - return self.cluster.get(Ota.AttributeDefs.current_file_version.name) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Ota.cluster_id) -class OtaClientClusterHandler(ClientClusterHandler): - """OTA client cluster handler.""" - - BIND: bool = False - - ZCL_INIT_ATTRS = { - Ota.AttributeDefs.current_file_version.name: True, - } - - @callback - def attribute_updated(self, attrid: int, value: Any, timestamp: Any) -> None: - """Handle an attribute updated on this cluster.""" - # We intentionally avoid the `ClientClusterHandler` attribute update handler: - # it emits a logbook event on every update, which pollutes the logbook - ClusterHandler.attribute_updated(self, attrid, value, timestamp) - - @property - def current_file_version(self) -> int | None: - """Return cached value of current_file_version attribute.""" - return self.cluster.get(Ota.AttributeDefs.current_file_version.name) - - @callback - def cluster_command( - self, tsn: int, command_id: int, args: list[Any] | None - ) -> None: - """Handle OTA commands.""" - if command_id not in self.cluster.server_commands: - return - - signal_id = self._endpoint.unique_id.split("-")[0] - cmd_name = self.cluster.server_commands[command_id].name - - if cmd_name == Ota.ServerCommandDefs.query_next_image.name: - assert args - - current_file_version = args[3] - self.cluster.update_attribute( - Ota.AttributeDefs.current_file_version.id, current_file_version - ) - self.async_send_signal( - SIGNAL_UPDATE_DEVICE.format(signal_id), current_file_version - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Partition.cluster_id) -class PartitionClusterHandler(ClusterHandler): - """Partition cluster handler.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(PollControl.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PollControl.cluster_id) -class PollControlClusterHandler(ClusterHandler): - """Poll Control cluster handler.""" - - CHECKIN_INTERVAL = 55 * 60 * 4 # 55min - CHECKIN_FAST_POLL_TIMEOUT = 2 * 4 # 2s - LONG_POLL = 6 * 4 # 6s - _IGNORED_MANUFACTURER_ID = { - 4476, - } # IKEA - - async def async_configure_cluster_handler_specific(self) -> None: - """Configure cluster handler: set check-in interval.""" - await 
self.write_attributes_safe( - {PollControl.AttributeDefs.checkin_interval.name: self.CHECKIN_INTERVAL} - ) - - @callback - def cluster_command( - self, tsn: int, command_id: int, args: list[Any] | None - ) -> None: - """Handle commands received to this cluster.""" - if command_id in self.cluster.client_commands: - cmd_name = self.cluster.client_commands[command_id].name - else: - cmd_name = command_id - - self.debug("Received %s tsn command '%s': %s", tsn, cmd_name, args) - self.zha_send_event(cmd_name, args) - if cmd_name == PollControl.ClientCommandDefs.checkin.name: - self.cluster.create_catching_task(self.check_in_response(tsn)) - - async def check_in_response(self, tsn: int) -> None: - """Respond to checkin command.""" - await self.checkin_response(True, self.CHECKIN_FAST_POLL_TIMEOUT, tsn=tsn) - if self._endpoint.device.manufacturer_code not in self._IGNORED_MANUFACTURER_ID: - await self.set_long_poll_interval(self.LONG_POLL) - await self.fast_poll_stop() - - @callback - def skip_manufacturer_id(self, manufacturer_code: int) -> None: - """Block a specific manufacturer id from changing default polling.""" - self._IGNORED_MANUFACTURER_ID.add(manufacturer_code) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerConfiguration.cluster_id) -class PowerConfigurationClusterHandler(ClusterHandler): - """Cluster handler for the zigbee power configuration cluster.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PowerConfiguration.AttributeDefs.battery_voltage.name, - config=REPORT_CONFIG_BATTERY_SAVE, - ), - AttrReportConfig( - attr=PowerConfiguration.AttributeDefs.battery_percentage_remaining.name, - config=REPORT_CONFIG_BATTERY_SAVE, - ), - ) - - def async_initialize_cluster_handler_specific(self, from_cache: bool) -> Coroutine: - """Initialize cluster handler specific attrs.""" - attributes = [ - PowerConfiguration.AttributeDefs.battery_size.name, - PowerConfiguration.AttributeDefs.battery_quantity.name, - ] - return self.get_attributes( - attributes, from_cache=from_cache, only_cache=from_cache - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PowerProfile.cluster_id) -class PowerProfileClusterHandler(ClusterHandler): - """Power Profile cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RSSILocation.cluster_id) -class RSSILocationClusterHandler(ClusterHandler): - """RSSI Location cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) -class ScenesClientClusterHandler(ClientClusterHandler): - """Scenes cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Scenes.cluster_id) -class ScenesClusterHandler(ClusterHandler): - """Scenes cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Time.cluster_id) -class TimeClusterHandler(ClusterHandler): - """Time cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/helpers.py b/homeassistant/components/zha/core/cluster_handlers/helpers.py deleted file mode 100644 index 46557bf23a8..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/helpers.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Helpers for use with ZHA Zigbee cluster handlers.""" - -from . 
import ClusterHandler - - -def is_hue_motion_sensor(cluster_handler: ClusterHandler) -> bool: - """Return true if the manufacturer and model match known Hue motion sensor models.""" - return cluster_handler.cluster.endpoint.manufacturer in ( - "Philips", - "Signify Netherlands B.V.", - ) and cluster_handler.cluster.endpoint.model in ( - "SML001", - "SML002", - "SML003", - "SML004", - ) - - -def is_sonoff_presence_sensor(cluster_handler: ClusterHandler) -> bool: - """Return true if the manufacturer and model match known Sonoff sensor models.""" - return cluster_handler.cluster.endpoint.manufacturer in ( - "SONOFF", - ) and cluster_handler.cluster.endpoint.model in ("SNZB-06P",) diff --git a/homeassistant/components/zha/core/cluster_handlers/homeautomation.py b/homeassistant/components/zha/core/cluster_handlers/homeautomation.py deleted file mode 100644 index b287cb98f6a..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/homeautomation.py +++ /dev/null @@ -1,236 +0,0 @@ -"""Home automation cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import enum - -from zigpy.zcl.clusters.homeautomation import ( - ApplianceEventAlerts, - ApplianceIdentification, - ApplianceStatistics, - Diagnostic, - ElectricalMeasurement, - MeterIdentification, -) - -from .. import registries -from ..const import ( - CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . import AttrReportConfig, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceEventAlerts.cluster_id) -class ApplianceEventAlertsClusterHandler(ClusterHandler): - """Appliance Event Alerts cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceIdentification.cluster_id) -class ApplianceIdentificationClusterHandler(ClusterHandler): - """Appliance Identification cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ApplianceStatistics.cluster_id) -class ApplianceStatisticsClusterHandler(ClusterHandler): - """Appliance Statistics cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Diagnostic.cluster_id) -class DiagnosticClusterHandler(ClusterHandler): - """Diagnostic cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(ElectricalMeasurement.cluster_id) -class ElectricalMeasurementClusterHandler(ClusterHandler): - """Cluster handler that polls active power level.""" - - CLUSTER_HANDLER_NAME = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT - - class MeasurementType(enum.IntFlag): - """Measurement types.""" - - ACTIVE_MEASUREMENT = 1 - REACTIVE_MEASUREMENT = 2 - APPARENT_MEASUREMENT = 4 - PHASE_A_MEASUREMENT = 8 - PHASE_B_MEASUREMENT = 16 - PHASE_C_MEASUREMENT = 32 - DC_MEASUREMENT = 64 - HARMONICS_MEASUREMENT = 128 - POWER_QUALITY_MEASUREMENT = 256 - - REPORT_CONFIG = ( - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.active_power.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.active_power_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.apparent_power.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_current.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_current_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - 
attr=ElectricalMeasurement.AttributeDefs.rms_voltage.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.rms_voltage_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.ac_frequency.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=ElectricalMeasurement.AttributeDefs.ac_frequency_max.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - ZCL_INIT_ATTRS = { - ElectricalMeasurement.AttributeDefs.ac_current_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_power_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name: True, - ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.measurement_type.name: True, - ElectricalMeasurement.AttributeDefs.power_divisor.name: True, - ElectricalMeasurement.AttributeDefs.power_multiplier.name: True, - ElectricalMeasurement.AttributeDefs.power_factor.name: True, - } - - async def async_update(self): - """Retrieve latest state.""" - self.debug("async_update") - - # This is a polling cluster handler. Don't allow cache. - attrs = [ - a["attr"] - for a in self.REPORT_CONFIG - if a["attr"] not in self.cluster.unsupported_attributes - ] - result = await self.get_attributes(attrs, from_cache=False, only_cache=False) - if result: - for attr, value in result.items(): - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - self.cluster.find_attribute(attr).id, - attr, - value, - ) - - @property - def ac_current_divisor(self) -> int: - """Return ac current divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_current_divisor.name - ) - or 1 - ) - - @property - def ac_current_multiplier(self) -> int: - """Return ac current multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_current_multiplier.name - ) - or 1 - ) - - @property - def ac_voltage_divisor(self) -> int: - """Return ac voltage divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_voltage_divisor.name - ) - or 1 - ) - - @property - def ac_voltage_multiplier(self) -> int: - """Return ac voltage multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_voltage_multiplier.name - ) - or 1 - ) - - @property - def ac_frequency_divisor(self) -> int: - """Return ac frequency divisor.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_frequency_divisor.name - ) - or 1 - ) - - @property - def ac_frequency_multiplier(self) -> int: - """Return ac frequency multiplier.""" - return ( - self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_frequency_multiplier.name - ) - or 1 - ) - - @property - def ac_power_divisor(self) -> int: - """Return active power divisor.""" - return self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_power_divisor.name, - self.cluster.get(ElectricalMeasurement.AttributeDefs.power_divisor.name) - or 1, - ) - - @property - def ac_power_multiplier(self) -> int: - """Return active power multiplier.""" - return self.cluster.get( - ElectricalMeasurement.AttributeDefs.ac_power_multiplier.name, - 
self.cluster.get(ElectricalMeasurement.AttributeDefs.power_multiplier.name) - or 1, - ) - - @property - def measurement_type(self) -> str | None: - """Return Measurement type.""" - if ( - meas_type := self.cluster.get( - ElectricalMeasurement.AttributeDefs.measurement_type.name - ) - ) is None: - return None - - meas_type = self.MeasurementType(meas_type) - return ", ".join( - m.name - for m in self.MeasurementType - if m in meas_type and m.name is not None - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MeterIdentification.cluster_id) -class MeterIdentificationClusterHandler(ClusterHandler): - """Metering Identification cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/hvac.py b/homeassistant/components/zha/core/cluster_handlers/hvac.py deleted file mode 100644 index 1230549832b..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/hvac.py +++ /dev/null @@ -1,347 +0,0 @@ -"""HVAC cluster handlers module for Zigbee Home Automation. - -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -from typing import Any - -from zigpy.zcl.clusters.hvac import ( - Dehumidification, - Fan, - Pump, - Thermostat, - UserInterface, -) - -from homeassistant.core import callback - -from .. import registries -from ..const import ( - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . import AttrReportConfig, ClusterHandler - -REPORT_CONFIG_CLIMATE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 25) -REPORT_CONFIG_CLIMATE_DEMAND = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 5) -REPORT_CONFIG_CLIMATE_DISCRETE = (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 1) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Dehumidification.cluster_id) -class DehumidificationClusterHandler(ClusterHandler): - """Dehumidification cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Fan.cluster_id) -class FanClusterHandler(ClusterHandler): - """Fan cluster handler.""" - - _value_attribute = 0 - - REPORT_CONFIG = ( - AttrReportConfig(attr=Fan.AttributeDefs.fan_mode.name, config=REPORT_CONFIG_OP), - ) - ZCL_INIT_ATTRS = {Fan.AttributeDefs.fan_mode_sequence.name: True} - - @property - def fan_mode(self) -> int | None: - """Return current fan mode.""" - return self.cluster.get(Fan.AttributeDefs.fan_mode.name) - - @property - def fan_mode_sequence(self) -> int | None: - """Return possible fan mode speeds.""" - return self.cluster.get(Fan.AttributeDefs.fan_mode_sequence.name) - - async def async_set_speed(self, value) -> None: - """Set the speed of the fan.""" - await self.write_attributes_safe({Fan.AttributeDefs.fan_mode.name: value}) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self.get_attribute_value( - Fan.AttributeDefs.fan_mode.name, from_cache=False - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from fan cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attr_name == "fan_mode": - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Pump.cluster_id) -class PumpClusterHandler(ClusterHandler): - """Pump cluster handler.""" - - 
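# Editor's note (illustrative sketch, not part of the deleted file): the
# REPORT_CONFIG_CLIMATE* tuples defined at the top of this module follow the
# (min_interval, max_interval, reportable_change) convention consumed by
# ClusterHandler.configure_reporting earlier in this diff, so the thermostat
# handler below declares its reporting as, for example:
#
#     REPORT_CONFIG = (
#         AttrReportConfig(
#             attr=Thermostat.AttributeDefs.local_temperature.name,
#             config=REPORT_CONFIG_CLIMATE,  # min/max interval plus a change of 25
#         ),
#     )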
-@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Thermostat.cluster_id) -class ThermostatClusterHandler(ClusterHandler): - """Thermostat cluster handler.""" - - REPORT_CONFIG: tuple[AttrReportConfig, ...] = ( - AttrReportConfig( - attr=Thermostat.AttributeDefs.local_temperature.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupied_cooling_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupied_heating_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.unoccupied_heating_setpoint.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.running_mode.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.running_state.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.system_mode.name, - config=REPORT_CONFIG_CLIMATE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.occupancy.name, - config=REPORT_CONFIG_CLIMATE_DISCRETE, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.pi_cooling_demand.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - AttrReportConfig( - attr=Thermostat.AttributeDefs.pi_heating_demand.name, - config=REPORT_CONFIG_CLIMATE_DEMAND, - ), - ) - ZCL_INIT_ATTRS: dict[str, bool] = { - Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.ctrl_sequence_of_oper.name: False, - Thermostat.AttributeDefs.max_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.max_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.min_cool_setpoint_limit.name: True, - Thermostat.AttributeDefs.min_heat_setpoint_limit.name: True, - Thermostat.AttributeDefs.local_temperature_calibration.name: True, - Thermostat.AttributeDefs.setpoint_change_source.name: True, - } - - @property - def abs_max_cool_setpoint_limit(self) -> int: - """Absolute maximum cooling setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_max_cool_setpoint_limit.name, 3200 - ) - - @property - def abs_min_cool_setpoint_limit(self) -> int: - """Absolute minimum cooling setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_min_cool_setpoint_limit.name, 1600 - ) - - @property - def abs_max_heat_setpoint_limit(self) -> int: - """Absolute maximum heating setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name, 3000 - ) - - @property - def abs_min_heat_setpoint_limit(self) -> int: - """Absolute minimum heating setpoint.""" - return self.cluster.get( - Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name, 700 - ) - - @property - def ctrl_sequence_of_oper(self) -> int: - """Control Sequence of operations attribute.""" - return self.cluster.get( - Thermostat.AttributeDefs.ctrl_sequence_of_oper.name, 0xFF - ) - - @property - def max_cool_setpoint_limit(self) -> int: - """Maximum cooling setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.max_cool_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_max_cool_setpoint_limit - return sp_limit - - @property - def 
min_cool_setpoint_limit(self) -> int: - """Minimum cooling setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.min_cool_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_min_cool_setpoint_limit - return sp_limit - - @property - def max_heat_setpoint_limit(self) -> int: - """Maximum heating setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.max_heat_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_max_heat_setpoint_limit - return sp_limit - - @property - def min_heat_setpoint_limit(self) -> int: - """Minimum heating setpoint.""" - sp_limit = self.cluster.get( - Thermostat.AttributeDefs.min_heat_setpoint_limit.name - ) - if sp_limit is None: - return self.abs_min_heat_setpoint_limit - return sp_limit - - @property - def local_temperature(self) -> int | None: - """Thermostat temperature.""" - return self.cluster.get(Thermostat.AttributeDefs.local_temperature.name) - - @property - def occupancy(self) -> int | None: - """Is occupancy detected.""" - return self.cluster.get(Thermostat.AttributeDefs.occupancy.name) - - @property - def occupied_cooling_setpoint(self) -> int | None: - """Temperature when room is occupied.""" - return self.cluster.get(Thermostat.AttributeDefs.occupied_cooling_setpoint.name) - - @property - def occupied_heating_setpoint(self) -> int | None: - """Temperature when room is occupied.""" - return self.cluster.get(Thermostat.AttributeDefs.occupied_heating_setpoint.name) - - @property - def pi_cooling_demand(self) -> int: - """Cooling demand.""" - return self.cluster.get(Thermostat.AttributeDefs.pi_cooling_demand.name) - - @property - def pi_heating_demand(self) -> int: - """Heating demand.""" - return self.cluster.get(Thermostat.AttributeDefs.pi_heating_demand.name) - - @property - def running_mode(self) -> int | None: - """Thermostat running mode.""" - return self.cluster.get(Thermostat.AttributeDefs.running_mode.name) - - @property - def running_state(self) -> int | None: - """Thermostat running state, state of heat, cool, fan relays.""" - return self.cluster.get(Thermostat.AttributeDefs.running_state.name) - - @property - def system_mode(self) -> int | None: - """System mode.""" - return self.cluster.get(Thermostat.AttributeDefs.system_mode.name) - - @property - def unoccupied_cooling_setpoint(self) -> int | None: - """Temperature when room is not occupied.""" - return self.cluster.get( - Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name - ) - - @property - def unoccupied_heating_setpoint(self) -> int | None: - """Temperature when room is not occupied.""" - return self.cluster.get( - Thermostat.AttributeDefs.unoccupied_heating_setpoint.name - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - - async def async_set_operation_mode(self, mode) -> bool: - """Set Operation mode.""" - await self.write_attributes_safe( - {Thermostat.AttributeDefs.system_mode.name: mode} - ) - return True - - async def async_set_heating_setpoint( - self, temperature: int, is_away: bool = False - ) -> bool: - """Set heating setpoint.""" - attr = ( - Thermostat.AttributeDefs.unoccupied_heating_setpoint.name - if is_away - else Thermostat.AttributeDefs.occupied_heating_setpoint.name - ) - await 
self.write_attributes_safe({attr: temperature}) - return True - - async def async_set_cooling_setpoint( - self, temperature: int, is_away: bool = False - ) -> bool: - """Set cooling setpoint.""" - attr = ( - Thermostat.AttributeDefs.unoccupied_cooling_setpoint.name - if is_away - else Thermostat.AttributeDefs.occupied_cooling_setpoint.name - ) - await self.write_attributes_safe({attr: temperature}) - return True - - async def get_occupancy(self) -> bool | None: - """Get unreportable occupancy attribute.""" - res, fail = await self.read_attributes( - [Thermostat.AttributeDefs.occupancy.name] - ) - self.debug("read 'occupancy' attr, success: %s, fail: %s", res, fail) - if Thermostat.AttributeDefs.occupancy.name not in res: - return None - return bool(self.occupancy) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(UserInterface.cluster_id) -class UserInterfaceClusterHandler(ClusterHandler): - """User interface (thermostat) cluster handler.""" - - ZCL_INIT_ATTRS = {UserInterface.AttributeDefs.keypad_lockout.name: True} diff --git a/homeassistant/components/zha/core/cluster_handlers/lighting.py b/homeassistant/components/zha/core/cluster_handlers/lighting.py deleted file mode 100644 index bde0fdbb0e7..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/lighting.py +++ /dev/null @@ -1,196 +0,0 @@ -"""Lighting cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from functools import cached_property - -from zigpy.zcl.clusters.lighting import Ballast, Color - -from .. import registries -from ..const import REPORT_CONFIG_DEFAULT -from . import AttrReportConfig, ClientClusterHandler, ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Ballast.cluster_id) -class BallastClusterHandler(ClusterHandler): - """Ballast cluster handler.""" - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) -class ColorClientClusterHandler(ClientClusterHandler): - """Color client cluster handler.""" - - -@registries.BINDABLE_CLUSTERS.register(Color.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Color.cluster_id) -class ColorClusterHandler(ClusterHandler): - """Color cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=Color.AttributeDefs.current_x.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_y.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_hue.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.current_saturation.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Color.AttributeDefs.color_temperature.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - MAX_MIREDS: int = 500 - MIN_MIREDS: int = 153 - ZCL_INIT_ATTRS = { - Color.AttributeDefs.color_mode.name: False, - Color.AttributeDefs.color_temp_physical_min.name: True, - Color.AttributeDefs.color_temp_physical_max.name: True, - Color.AttributeDefs.color_capabilities.name: True, - Color.AttributeDefs.color_loop_active.name: False, - Color.AttributeDefs.enhanced_current_hue.name: False, - Color.AttributeDefs.start_up_color_temperature.name: True, - Color.AttributeDefs.options.name: True, - } - - @cached_property - def color_capabilities(self) -> Color.ColorCapabilities: - """Return ZCL color capabilities of the light.""" - color_capabilities = self.cluster.get( - Color.AttributeDefs.color_capabilities.name - ) - if color_capabilities is None: - return 
Color.ColorCapabilities.XY_attributes - return Color.ColorCapabilities(color_capabilities) - - @property - def color_mode(self) -> int | None: - """Return cached value of the color_mode attribute.""" - return self.cluster.get(Color.AttributeDefs.color_mode.name) - - @property - def color_loop_active(self) -> int | None: - """Return cached value of the color_loop_active attribute.""" - return self.cluster.get(Color.AttributeDefs.color_loop_active.name) - - @property - def color_temperature(self) -> int | None: - """Return cached value of color temperature.""" - return self.cluster.get(Color.AttributeDefs.color_temperature.name) - - @property - def current_x(self) -> int | None: - """Return cached value of the current_x attribute.""" - return self.cluster.get(Color.AttributeDefs.current_x.name) - - @property - def current_y(self) -> int | None: - """Return cached value of the current_y attribute.""" - return self.cluster.get(Color.AttributeDefs.current_y.name) - - @property - def current_hue(self) -> int | None: - """Return cached value of the current_hue attribute.""" - return self.cluster.get(Color.AttributeDefs.current_hue.name) - - @property - def enhanced_current_hue(self) -> int | None: - """Return cached value of the enhanced_current_hue attribute.""" - return self.cluster.get(Color.AttributeDefs.enhanced_current_hue.name) - - @property - def current_saturation(self) -> int | None: - """Return cached value of the current_saturation attribute.""" - return self.cluster.get(Color.AttributeDefs.current_saturation.name) - - @property - def min_mireds(self) -> int: - """Return the coldest color_temp that this cluster handler supports.""" - min_mireds = self.cluster.get( - Color.AttributeDefs.color_temp_physical_min.name, self.MIN_MIREDS - ) - if min_mireds == 0: - self.warning( - ( - "[Min mireds is 0, setting to %s] Please open an issue on the" - " quirks repo to have this device corrected" - ), - self.MIN_MIREDS, - ) - min_mireds = self.MIN_MIREDS - return min_mireds - - @property - def max_mireds(self) -> int: - """Return the warmest color_temp that this cluster handler supports.""" - max_mireds = self.cluster.get( - Color.AttributeDefs.color_temp_physical_max.name, self.MAX_MIREDS - ) - if max_mireds == 0: - self.warning( - ( - "[Max mireds is 0, setting to %s] Please open an issue on the" - " quirks repo to have this device corrected" - ), - self.MAX_MIREDS, - ) - max_mireds = self.MAX_MIREDS - return max_mireds - - @property - def hs_supported(self) -> bool: - """Return True if the cluster handler supports hue and saturation.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Hue_and_saturation in self.color_capabilities - ) - - @property - def enhanced_hue_supported(self) -> bool: - """Return True if the cluster handler supports enhanced hue and saturation.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Enhanced_hue in self.color_capabilities - ) - - @property - def xy_supported(self) -> bool: - """Return True if the cluster handler supports xy.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.XY_attributes in self.color_capabilities - ) - - @property - def color_temp_supported(self) -> bool: - """Return True if the cluster handler supports color temperature.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Color_temperature in self.color_capabilities - ) or self.color_temperature is not None - - @property - def color_loop_supported(self) -> bool: - 
"""Return True if the cluster handler supports color loop.""" - return ( - self.color_capabilities is not None - and Color.ColorCapabilities.Color_loop in self.color_capabilities - ) - - @property - def options(self) -> Color.Options: - """Return ZCL options of the cluster handler.""" - return Color.Options(self.cluster.get(Color.AttributeDefs.options.name, 0)) - - @property - def execute_if_off_supported(self) -> bool: - """Return True if the cluster handler can execute commands when off.""" - return Color.Options.Execute_if_off in self.options diff --git a/homeassistant/components/zha/core/cluster_handlers/lightlink.py b/homeassistant/components/zha/core/cluster_handlers/lightlink.py deleted file mode 100644 index 85ec6905069..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/lightlink.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Lightlink cluster handlers module for Zigbee Home Automation.""" - -import zigpy.exceptions -from zigpy.zcl.clusters.lightlink import LightLink -from zigpy.zcl.foundation import GENERAL_COMMANDS, GeneralCommand - -from .. import registries -from . import ClusterHandler, ClusterHandlerStatus - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(LightLink.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LightLink.cluster_id) -class LightLinkClusterHandler(ClusterHandler): - """Lightlink cluster handler.""" - - BIND: bool = False - - async def async_configure(self) -> None: - """Add Coordinator to LightLink group.""" - - if self._endpoint.device.skip_configuration: - self._status = ClusterHandlerStatus.CONFIGURED - return - - application = self._endpoint.zigpy_endpoint.device.application - try: - coordinator = application.get_device(application.state.node_info.ieee) - except KeyError: - self.warning("Aborting - unable to locate required coordinator device.") - return - - try: - rsp = await self.cluster.get_group_identifiers(0) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as exc: - self.warning("Couldn't get list of groups: %s", str(exc)) - return - - if isinstance(rsp, GENERAL_COMMANDS[GeneralCommand.Default_Response].schema): - groups = [] - else: - groups = rsp.group_info_records - - if groups: - for group in groups: - self.debug("Adding coordinator to 0x%04x group id", group.group_id) - await coordinator.add_to_group(group.group_id) - else: - await coordinator.add_to_group(0x0000, name="Default Lightlink Group") diff --git a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py b/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py deleted file mode 100644 index 9d5d68d2c7e..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/manufacturerspecific.py +++ /dev/null @@ -1,515 +0,0 @@ -"""Manufacturer specific cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import logging -from typing import TYPE_CHECKING, Any - -from zhaquirks.inovelli.types import AllLEDEffectType, SingleLEDEffectType -from zhaquirks.quirk_ids import ( - DANFOSS_ALLY_THERMOSTAT, - TUYA_PLUG_MANUFACTURER, - XIAOMI_AQARA_VIBRATION_AQ1, -) -import zigpy.zcl -from zigpy.zcl import clusters -from zigpy.zcl.clusters.closures import DoorLock - -from homeassistant.core import callback - -from .. 
import registries -from ..const import ( - ATTR_ATTRIBUTE_ID, - ATTR_ATTRIBUTE_NAME, - ATTR_VALUE, - REPORT_CONFIG_ASAP, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, - SIGNAL_ATTR_UPDATED, - UNKNOWN, -) -from . import AttrReportConfig, ClientClusterHandler, ClusterHandler -from .general import MultistateInputClusterHandler -from .homeautomation import DiagnosticClusterHandler -from .hvac import ThermostatClusterHandler, UserInterfaceClusterHandler - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -_LOGGER = logging.getLogger(__name__) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.SMARTTHINGS_HUMIDITY_CLUSTER -) -class SmartThingsHumidityClusterHandler(ClusterHandler): - """Smart Things Humidity cluster handler.""" - - REPORT_CONFIG = ( - { - "attr": "measured_value", - "config": (REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - }, - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFD00) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFD00) -class OsramButtonClusterHandler(ClusterHandler): - """Osram button cluster handler.""" - - REPORT_CONFIG = () - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.PHILLIPS_REMOTE_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(registries.PHILLIPS_REMOTE_CLUSTER) -class PhillipsRemoteClusterHandler(ClusterHandler): - """Phillips remote cluster handler.""" - - REPORT_CONFIG = () - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.TUYA_MANUFACTURER_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.TUYA_MANUFACTURER_CLUSTER -) -class TuyaClusterHandler(ClusterHandler): - """Cluster handler for the Tuya manufacturer Zigbee cluster.""" - - REPORT_CONFIG = () - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize TuyaClusterHandler.""" - super().__init__(cluster, endpoint) - if endpoint.device.quirk_id == TUYA_PLUG_MANUFACTURER: - self.ZCL_INIT_ATTRS = { - "backlight_mode": True, - "power_on_state": True, - } - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFCC0) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFCC0) -class OppleRemoteClusterHandler(ClusterHandler): - """Opple cluster handler.""" - - REPORT_CONFIG = () - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Opple cluster handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "lumi.motion.ac02": - self.ZCL_INIT_ATTRS = { - "detection_interval": True, - "motion_sensitivity": True, - "trigger_indicator": True, - } - elif self.cluster.endpoint.model == "lumi.motion.agl04": - self.ZCL_INIT_ATTRS = { - "detection_interval": True, - "motion_sensitivity": True, - } - elif self.cluster.endpoint.model == "lumi.motion.ac01": - self.ZCL_INIT_ATTRS = { - "presence": True, - "monitoring_mode": True, - "motion_sensitivity": True, - "approach_distance": True, - } - elif self.cluster.endpoint.model in ("lumi.plug.mmeu01", "lumi.plug.maeu01"): - self.ZCL_INIT_ATTRS = { - "power_outage_memory": True, - "consumer_connected": True, - } - elif self.cluster.endpoint.model == "aqara.feeder.acn001": - self.ZCL_INIT_ATTRS = { - "portions_dispensed": True, - "weight_dispensed": True, - "error_detected": True, - "disable_led_indicator": True, - "child_lock": True, - "feeding_mode": True, - "serving_size": True, - "portion_weight": True, - } - elif self.cluster.endpoint.model == "lumi.airrtc.agl001": - 
self.ZCL_INIT_ATTRS = { - "system_mode": True, - "preset": True, - "window_detection": True, - "valve_detection": True, - "valve_alarm": True, - "child_lock": True, - "away_preset_temperature": True, - "window_open": True, - "calibrated": True, - "schedule": True, - "sensor": True, - } - elif self.cluster.endpoint.model == "lumi.sensor_smoke.acn03": - self.ZCL_INIT_ATTRS = { - "buzzer_manual_mute": True, - "smoke_density": True, - "heartbeat_indicator": True, - "buzzer_manual_alarm": True, - "buzzer": True, - "linkage_alarm": True, - } - elif self.cluster.endpoint.model == "lumi.magnet.ac01": - self.ZCL_INIT_ATTRS = { - "detection_distance": True, - } - elif self.cluster.endpoint.model == "lumi.switch.acn047": - self.ZCL_INIT_ATTRS = { - "switch_mode": True, - "switch_type": True, - "startup_on_off": True, - "decoupled_mode": True, - } - elif self.cluster.endpoint.model == "lumi.curtain.agl001": - self.ZCL_INIT_ATTRS = { - "hooks_state": True, - "hooks_lock": True, - "positions_stored": True, - "light_level": True, - "hand_open": True, - } - - async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: - """Initialize cluster handler specific.""" - if self.cluster.endpoint.model in ("lumi.motion.ac02", "lumi.motion.agl04"): - interval = self.cluster.get("detection_interval", self.cluster.get(0x0102)) - if interval is not None: - self.debug("Loaded detection interval at startup: %s", interval) - self.cluster.endpoint.ias_zone.reset_s = int(interval) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.SMARTTHINGS_ACCELERATION_CLUSTER -) -class SmartThingsAccelerationClusterHandler(ClusterHandler): - """Smart Things Acceleration cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig(attr="acceleration", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="x_axis", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="y_axis", config=REPORT_CONFIG_ASAP), - AttrReportConfig(attr="z_axis", config=REPORT_CONFIG_ASAP), - ) - - @classmethod - def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool: - """Filter the cluster match for specific devices.""" - return cluster.endpoint.device.manufacturer in ( - "CentraLite", - "Samjin", - "SmartThings", - ) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - try: - attr_name = self._cluster.attributes[attrid].name - except KeyError: - attr_name = UNKNOWN - - if attrid == self.value_attribute: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - attr_name, - value, - ) - return - - self.zha_send_event( - SIGNAL_ATTR_UPDATED, - { - ATTR_ATTRIBUTE_ID: attrid, - ATTR_ATTRIBUTE_NAME: attr_name, - ATTR_VALUE: value, - }, - ) - - -@registries.CLIENT_CLUSTER_HANDLER_REGISTRY.register(0xFC31) -class InovelliNotificationClientClusterHandler(ClientClusterHandler): - """Inovelli Notification cluster handler.""" - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle an attribute updated on this cluster.""" - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle a cluster command received on this cluster.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC31) -class InovelliConfigEntityClusterHandler(ClusterHandler): - """Inovelli Configuration Entity cluster handler.""" - - REPORT_CONFIG = () - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Inovelli cluster 
handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "VZM31-SN": - self.ZCL_INIT_ATTRS = { - "dimming_speed_up_remote": True, - "dimming_speed_up_local": True, - "ramp_rate_off_to_on_local": True, - "ramp_rate_off_to_on_remote": True, - "dimming_speed_down_remote": True, - "dimming_speed_down_local": True, - "ramp_rate_on_to_off_local": True, - "ramp_rate_on_to_off_remote": True, - "minimum_level": True, - "maximum_level": True, - "invert_switch": True, - "auto_off_timer": True, - "default_level_local": True, - "default_level_remote": True, - "state_after_power_restored": True, - "load_level_indicator_timeout": True, - "active_power_reports": True, - "periodic_power_and_energy_reports": True, - "active_energy_reports": True, - "power_type": False, - "switch_type": False, - "increased_non_neutral_output": True, - "button_delay": False, - "smart_bulb_mode": False, - "double_tap_up_enabled": True, - "double_tap_down_enabled": True, - "double_tap_up_level": True, - "double_tap_down_level": True, - "led_color_when_on": True, - "led_color_when_off": True, - "led_intensity_when_on": True, - "led_intensity_when_off": True, - "led_scaling_mode": True, - "aux_switch_scenes": True, - "binding_off_to_on_sync_level": True, - "local_protection": False, - "output_mode": False, - "on_off_led_mode": True, - "firmware_progress_led": True, - "relay_click_in_on_off_mode": True, - "disable_clear_notifications_double_tap": True, - } - elif self.cluster.endpoint.model == "VZM35-SN": - self.ZCL_INIT_ATTRS = { - "dimming_speed_up_remote": True, - "dimming_speed_up_local": True, - "ramp_rate_off_to_on_local": True, - "ramp_rate_off_to_on_remote": True, - "dimming_speed_down_remote": True, - "dimming_speed_down_local": True, - "ramp_rate_on_to_off_local": True, - "ramp_rate_on_to_off_remote": True, - "minimum_level": True, - "maximum_level": True, - "invert_switch": True, - "auto_off_timer": True, - "default_level_local": True, - "default_level_remote": True, - "state_after_power_restored": True, - "load_level_indicator_timeout": True, - "power_type": False, - "switch_type": False, - "non_neutral_aux_med_gear_learn_value": True, - "non_neutral_aux_low_gear_learn_value": True, - "quick_start_time": False, - "button_delay": False, - "smart_fan_mode": False, - "double_tap_up_enabled": True, - "double_tap_down_enabled": True, - "double_tap_up_level": True, - "double_tap_down_level": True, - "led_color_when_on": True, - "led_color_when_off": True, - "led_intensity_when_on": True, - "led_intensity_when_off": True, - "aux_switch_scenes": True, - "local_protection": False, - "output_mode": False, - "on_off_led_mode": True, - "firmware_progress_led": True, - "smart_fan_led_display_levels": True, - } - - async def issue_all_led_effect( - self, - effect_type: AllLEDEffectType | int = AllLEDEffectType.Fast_Blink, - color: int = 200, - level: int = 100, - duration: int = 3, - **kwargs: Any, - ) -> None: - """Issue all LED effect command. - - This command is used to issue an LED effect to all LEDs on the device. - """ - - await self.led_effect(effect_type, color, level, duration, expect_reply=False) - - async def issue_individual_led_effect( - self, - led_number: int = 1, - effect_type: SingleLEDEffectType | int = SingleLEDEffectType.Fast_Blink, - color: int = 200, - level: int = 100, - duration: int = 3, - **kwargs: Any, - ) -> None: - """Issue individual LED effect command. - - This command is used to issue an LED effect to the specified LED on the device. 
- """ - - await self.individual_led_effect( - led_number, effect_type, color, level, duration, expect_reply=False - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(registries.IKEA_AIR_PURIFIER_CLUSTER) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - registries.IKEA_AIR_PURIFIER_CLUSTER -) -class IkeaAirPurifierClusterHandler(ClusterHandler): - """IKEA Air Purifier cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig(attr="filter_run_time", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="replace_filter", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="filter_life_time", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="disable_led", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="air_quality_25pm", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="child_lock", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="fan_mode", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="fan_speed", config=REPORT_CONFIG_IMMEDIATE), - AttrReportConfig(attr="device_run_time", config=REPORT_CONFIG_DEFAULT), - ) - - @property - def fan_mode(self) -> int | None: - """Return current fan mode.""" - return self.cluster.get("fan_mode") - - @property - def fan_mode_sequence(self) -> int | None: - """Return possible fan mode speeds.""" - return self.cluster.get("fan_mode_sequence") - - async def async_set_speed(self, value) -> None: - """Set the speed of the fan.""" - await self.write_attributes_safe({"fan_mode": value}) - - async def async_update(self) -> None: - """Retrieve latest state.""" - await self.get_attribute_value("fan_mode", from_cache=False) - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute update from fan cluster.""" - attr_name = self._get_attribute_name(attrid) - self.debug( - "Attribute report '%s'[%s] = %s", self.cluster.name, attr_name, value - ) - if attr_name == "fan_mode": - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, attr_name, value - ) - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC80) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC80) -class IkeaRemoteClusterHandler(ClusterHandler): - """Ikea Matter remote cluster handler.""" - - REPORT_CONFIG = () - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - DoorLock.cluster_id, XIAOMI_AQARA_VIBRATION_AQ1 -) -class XiaomiVibrationAQ1ClusterHandler(MultistateInputClusterHandler): - """Xiaomi DoorLock Cluster is in fact a MultiStateInput Cluster.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(0xFC11) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(0xFC11) -class SonoffPresenceSenorClusterHandler(ClusterHandler): - """SonoffPresenceSensor cluster handler.""" - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize SonoffPresenceSensor cluster handler.""" - super().__init__(cluster, endpoint) - if self.cluster.endpoint.model == "SNZB-06P": - self.ZCL_INIT_ATTRS = {"last_illumination_state": True} - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.hvac.Thermostat.cluster_id, DANFOSS_ALLY_THERMOSTAT -) -class DanfossThermostatClusterHandler(ThermostatClusterHandler): - """Thermostat cluster handler for the Danfoss TRV and derivatives.""" - - REPORT_CONFIG = ( - *ThermostatClusterHandler.REPORT_CONFIG, - AttrReportConfig(attr="open_window_detection", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="heat_required", config=REPORT_CONFIG_ASAP), - 
AttrReportConfig(attr="mounting_mode_active", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="load_estimate", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="adaptation_run_status", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="preheat_status", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="preheat_time", config=REPORT_CONFIG_DEFAULT), - ) - - ZCL_INIT_ATTRS = { - **ThermostatClusterHandler.ZCL_INIT_ATTRS, - "external_open_window_detected": True, - "window_open_feature": True, - "exercise_day_of_week": True, - "exercise_trigger_time": True, - "mounting_mode_control": False, # Can change - "orientation": True, - "external_measured_room_sensor": False, # Can change - "radiator_covered": True, - "heat_available": True, - "load_balancing_enable": True, - "load_room_mean": False, # Can change - "control_algorithm_scale_factor": True, - "regulation_setpoint_offset": True, - "adaptation_run_control": True, - "adaptation_run_settings": True, - } - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.hvac.UserInterface.cluster_id, DANFOSS_ALLY_THERMOSTAT -) -class DanfossUserInterfaceClusterHandler(UserInterfaceClusterHandler): - """Interface cluster handler for the Danfoss TRV and derivatives.""" - - ZCL_INIT_ATTRS = { - **UserInterfaceClusterHandler.ZCL_INIT_ATTRS, - "viewing_direction": True, - } - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - clusters.homeautomation.Diagnostic.cluster_id, DANFOSS_ALLY_THERMOSTAT -) -class DanfossDiagnosticClusterHandler(DiagnosticClusterHandler): - """Diagnostic cluster handler for the Danfoss TRV and derivatives.""" - - REPORT_CONFIG = ( - *DiagnosticClusterHandler.REPORT_CONFIG, - AttrReportConfig(attr="sw_error_code", config=REPORT_CONFIG_DEFAULT), - AttrReportConfig(attr="motor_step_counter", config=REPORT_CONFIG_DEFAULT), - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/measurement.py b/homeassistant/components/zha/core/cluster_handlers/measurement.py deleted file mode 100644 index 768de8c4c73..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/measurement.py +++ /dev/null @@ -1,208 +0,0 @@ -"""Measurement cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -from typing import TYPE_CHECKING - -import zigpy.zcl -from zigpy.zcl.clusters.measurement import ( - PM25, - CarbonDioxideConcentration, - CarbonMonoxideConcentration, - FlowMeasurement, - FormaldehydeConcentration, - IlluminanceLevelSensing, - IlluminanceMeasurement, - LeafWetness, - OccupancySensing, - PressureMeasurement, - RelativeHumidity, - SoilMoisture, - TemperatureMeasurement, -) - -from .. import registries -from ..const import ( - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_MIN_INT, -) -from . 
import AttrReportConfig, ClusterHandler -from .helpers import is_hue_motion_sensor, is_sonoff_presence_sensor - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(FlowMeasurement.cluster_id) -class FlowMeasurementClusterHandler(ClusterHandler): - """Flow Measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=FlowMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceLevelSensing.cluster_id) -class IlluminanceLevelSensingClusterHandler(ClusterHandler): - """Illuminance Level Sensing cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=IlluminanceLevelSensing.AttributeDefs.level_status.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IlluminanceMeasurement.cluster_id) -class IlluminanceMeasurementClusterHandler(ClusterHandler): - """Illuminance Measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=IlluminanceMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(OccupancySensing.cluster_id) -class OccupancySensingClusterHandler(ClusterHandler): - """Occupancy Sensing cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=OccupancySensing.AttributeDefs.occupancy.name, - config=REPORT_CONFIG_IMMEDIATE, - ), - ) - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Occupancy cluster handler.""" - super().__init__(cluster, endpoint) - if is_hue_motion_sensor(self): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["sensitivity"] = True - if is_sonoff_presence_sensor(self): - self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy() - self.ZCL_INIT_ATTRS["ultrasonic_o_to_u_delay"] = True - self.ZCL_INIT_ATTRS["ultrasonic_u_to_o_threshold"] = True - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PressureMeasurement.cluster_id) -class PressureMeasurementClusterHandler(ClusterHandler): - """Pressure measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PressureMeasurement.AttributeDefs.measured_value.name, - config=REPORT_CONFIG_DEFAULT, - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(RelativeHumidity.cluster_id) -class RelativeHumidityClusterHandler(ClusterHandler): - """Relative Humidity measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=RelativeHumidity.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(SoilMoisture.cluster_id) -class SoilMoistureClusterHandler(ClusterHandler): - """Soil Moisture measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=SoilMoisture.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(LeafWetness.cluster_id) -class LeafWetnessClusterHandler(ClusterHandler): - """Leaf Wetness measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=LeafWetness.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 100), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(TemperatureMeasurement.cluster_id) -class 
TemperatureMeasurementClusterHandler(ClusterHandler): - """Temperature measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=TemperatureMeasurement.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 50), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - CarbonMonoxideConcentration.cluster_id -) -class CarbonMonoxideConcentrationClusterHandler(ClusterHandler): - """Carbon Monoxide measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=CarbonMonoxideConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - CarbonDioxideConcentration.cluster_id -) -class CarbonDioxideConcentrationClusterHandler(ClusterHandler): - """Carbon Dioxide measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=CarbonDioxideConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(PM25.cluster_id) -class PM25ClusterHandler(ClusterHandler): - """Particulate Matter 2.5 microns or less measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=PM25.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.1), - ), - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - FormaldehydeConcentration.cluster_id -) -class FormaldehydeConcentrationClusterHandler(ClusterHandler): - """Formaldehyde measurement cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=FormaldehydeConcentration.AttributeDefs.measured_value.name, - config=(REPORT_CONFIG_MIN_INT, REPORT_CONFIG_MAX_INT, 0.000001), - ), - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/protocol.py b/homeassistant/components/zha/core/cluster_handlers/protocol.py deleted file mode 100644 index e1e3d7a5413..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/protocol.py +++ /dev/null @@ -1,129 +0,0 @@ -"""Protocol cluster handlers module for Zigbee Home Automation.""" - -from zigpy.zcl.clusters.protocol import ( - AnalogInputExtended, - AnalogInputRegular, - AnalogOutputExtended, - AnalogOutputRegular, - AnalogValueExtended, - AnalogValueRegular, - BacnetProtocolTunnel, - BinaryInputExtended, - BinaryInputRegular, - BinaryOutputExtended, - BinaryOutputRegular, - BinaryValueExtended, - BinaryValueRegular, - GenericTunnel, - MultistateInputExtended, - MultistateInputRegular, - MultistateOutputExtended, - MultistateOutputRegular, - MultistateValueExtended, - MultistateValueRegular, -) - -from .. import registries -from . 
import ClusterHandler - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputExtended.cluster_id) -class AnalogInputExtendedClusterHandler(ClusterHandler): - """Analog Input Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogInputRegular.cluster_id) -class AnalogInputRegularClusterHandler(ClusterHandler): - """Analog Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputExtended.cluster_id) -class AnalogOutputExtendedClusterHandler(ClusterHandler): - """Analog Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogOutputRegular.cluster_id) -class AnalogOutputRegularClusterHandler(ClusterHandler): - """Analog Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueExtended.cluster_id) -class AnalogValueExtendedClusterHandler(ClusterHandler): - """Analog Value Extended edition cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AnalogValueRegular.cluster_id) -class AnalogValueRegularClusterHandler(ClusterHandler): - """Analog Value Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BacnetProtocolTunnel.cluster_id) -class BacnetProtocolTunnelClusterHandler(ClusterHandler): - """Bacnet Protocol Tunnel cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputExtended.cluster_id) -class BinaryInputExtendedClusterHandler(ClusterHandler): - """Binary Input Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryInputRegular.cluster_id) -class BinaryInputRegularClusterHandler(ClusterHandler): - """Binary Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputExtended.cluster_id) -class BinaryOutputExtendedClusterHandler(ClusterHandler): - """Binary Output Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryOutputRegular.cluster_id) -class BinaryOutputRegularClusterHandler(ClusterHandler): - """Binary Output Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueExtended.cluster_id) -class BinaryValueExtendedClusterHandler(ClusterHandler): - """Binary Value Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(BinaryValueRegular.cluster_id) -class BinaryValueRegularClusterHandler(ClusterHandler): - """Binary Value Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(GenericTunnel.cluster_id) -class GenericTunnelClusterHandler(ClusterHandler): - """Generic Tunnel cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputExtended.cluster_id) -class MultiStateInputExtendedClusterHandler(ClusterHandler): - """Multistate Input Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateInputRegular.cluster_id) -class MultiStateInputRegularClusterHandler(ClusterHandler): - """Multistate Input Regular cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register( - MultistateOutputExtended.cluster_id -) -class MultiStateOutputExtendedClusterHandler(ClusterHandler): - """Multistate Output Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateOutputRegular.cluster_id) -class MultiStateOutputRegularClusterHandler(ClusterHandler): - """Multistate Output Regular 
cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueExtended.cluster_id) -class MultiStateValueExtendedClusterHandler(ClusterHandler): - """Multistate Value Extended cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MultistateValueRegular.cluster_id) -class MultiStateValueRegularClusterHandler(ClusterHandler): - """Multistate Value Regular cluster handler.""" diff --git a/homeassistant/components/zha/core/cluster_handlers/security.py b/homeassistant/components/zha/core/cluster_handlers/security.py deleted file mode 100644 index 8ebe09cef03..00000000000 --- a/homeassistant/components/zha/core/cluster_handlers/security.py +++ /dev/null @@ -1,400 +0,0 @@ -"""Security cluster handlers module for Zigbee Home Automation. - -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -from collections.abc import Callable -from typing import TYPE_CHECKING, Any - -import zigpy.zcl -from zigpy.zcl.clusters.security import IasAce as AceCluster, IasWd, IasZone - -from homeassistant.core import callback -from homeassistant.exceptions import HomeAssistantError - -from .. import registries -from ..const import ( - SIGNAL_ATTR_UPDATED, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_SQUAWK_MODE_ARMED, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_YES, -) -from . import ClusterHandler, ClusterHandlerStatus - -if TYPE_CHECKING: - from ..endpoint import Endpoint - -SIGNAL_ARMED_STATE_CHANGED = "zha_armed_state_changed" -SIGNAL_ALARM_TRIGGERED = "zha_armed_triggered" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(AceCluster.cluster_id) -class IasAceClusterHandler(ClusterHandler): - """IAS Ancillary Control Equipment cluster handler.""" - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize IAS Ancillary Control Equipment cluster handler.""" - super().__init__(cluster, endpoint) - self.command_map: dict[int, Callable[..., Any]] = { - AceCluster.ServerCommandDefs.arm.id: self.arm, - AceCluster.ServerCommandDefs.bypass.id: self._bypass, - AceCluster.ServerCommandDefs.emergency.id: self._emergency, - AceCluster.ServerCommandDefs.fire.id: self._fire, - AceCluster.ServerCommandDefs.panic.id: self._panic, - AceCluster.ServerCommandDefs.get_zone_id_map.id: self._get_zone_id_map, - AceCluster.ServerCommandDefs.get_zone_info.id: self._get_zone_info, - AceCluster.ServerCommandDefs.get_panel_status.id: self._send_panel_status_response, - AceCluster.ServerCommandDefs.get_bypassed_zone_list.id: self._get_bypassed_zone_list, - AceCluster.ServerCommandDefs.get_zone_status.id: self._get_zone_status, - } - self.arm_map: dict[AceCluster.ArmMode, Callable[..., Any]] = { - AceCluster.ArmMode.Disarm: self._disarm, - AceCluster.ArmMode.Arm_All_Zones: self._arm_away, - AceCluster.ArmMode.Arm_Day_Home_Only: self._arm_day, - AceCluster.ArmMode.Arm_Night_Sleep_Only: self._arm_night, - } - self.armed_state: AceCluster.PanelStatus = AceCluster.PanelStatus.Panel_Disarmed - self.invalid_tries: int = 0 - - # These will all be setup by the entity from ZHA configuration - self.panel_code: str = "1234" - self.code_required_arm_actions = False - self.max_invalid_tries: int = 3 - - # where do we store this to handle restarts - self.alarm_status: AceCluster.AlarmStatus = AceCluster.AlarmStatus.No_Alarm - - @callback - def cluster_command(self, tsn, command_id, args) -> None: - 
"""Handle commands received to this cluster.""" - self.debug( - "received command %s", self._cluster.server_commands[command_id].name - ) - self.command_map[command_id](*args) - - def arm(self, arm_mode: int, code: str | None, zone_id: int) -> None: - """Handle the IAS ACE arm command.""" - mode = AceCluster.ArmMode(arm_mode) - - self.zha_send_event( - AceCluster.ServerCommandDefs.arm.name, - { - "arm_mode": mode.value, - "arm_mode_description": mode.name, - "code": code, - "zone_id": zone_id, - }, - ) - - zigbee_reply = self.arm_map[mode](code) - self._endpoint.device.hass.async_create_task(zigbee_reply) - - if self.invalid_tries >= self.max_invalid_tries: - self.alarm_status = AceCluster.AlarmStatus.Emergency - self.armed_state = AceCluster.PanelStatus.In_Alarm - self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") - else: - self.async_send_signal(f"{self.unique_id}_{SIGNAL_ARMED_STATE_CHANGED}") - self._send_panel_status_changed() - - def _disarm(self, code: str): - """Test the code and disarm the panel if the code is correct.""" - if ( - code != self.panel_code - and self.armed_state != AceCluster.PanelStatus.Panel_Disarmed - ): - self.debug("Invalid code supplied to IAS ACE") - self.invalid_tries += 1 - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Invalid_Arm_Disarm_Code - ) - else: - self.invalid_tries = 0 - if ( - self.armed_state == AceCluster.PanelStatus.Panel_Disarmed - and self.alarm_status == AceCluster.AlarmStatus.No_Alarm - ): - self.debug("IAS ACE already disarmed") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Already_Disarmed - ) - else: - self.debug("Disarming all IAS ACE zones") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.All_Zones_Disarmed - ) - - self.armed_state = AceCluster.PanelStatus.Panel_Disarmed - self.alarm_status = AceCluster.AlarmStatus.No_Alarm - return zigbee_reply - - def _arm_day(self, code: str) -> None: - """Arm the panel for day / home zones.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Stay, - AceCluster.ArmNotification.Only_Day_Home_Zones_Armed, - ) - - def _arm_night(self, code: str) -> None: - """Arm the panel for night / sleep zones.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Night, - AceCluster.ArmNotification.Only_Night_Sleep_Zones_Armed, - ) - - def _arm_away(self, code: str) -> None: - """Arm the panel for away mode.""" - return self._handle_arm( - code, - AceCluster.PanelStatus.Armed_Away, - AceCluster.ArmNotification.All_Zones_Armed, - ) - - def _handle_arm( - self, - code: str, - panel_status: AceCluster.PanelStatus, - armed_type: AceCluster.ArmNotification, - ) -> None: - """Arm the panel with the specified statuses.""" - if self.code_required_arm_actions and code != self.panel_code: - self.debug("Invalid code supplied to IAS ACE") - zigbee_reply = self.arm_response( - AceCluster.ArmNotification.Invalid_Arm_Disarm_Code - ) - else: - self.debug("Arming all IAS ACE zones") - self.armed_state = panel_status - zigbee_reply = self.arm_response(armed_type) - return zigbee_reply - - def _bypass(self, zone_list, code) -> None: - """Handle the IAS ACE bypass command.""" - self.zha_send_event( - AceCluster.ServerCommandDefs.bypass.name, - {"zone_list": zone_list, "code": code}, - ) - - def _emergency(self) -> None: - """Handle the IAS ACE emergency command.""" - self._set_alarm(AceCluster.AlarmStatus.Emergency) - - def _fire(self) -> None: - """Handle the IAS ACE fire command.""" - self._set_alarm(AceCluster.AlarmStatus.Fire) - 
- def _panic(self) -> None: - """Handle the IAS ACE panic command.""" - self._set_alarm(AceCluster.AlarmStatus.Emergency_Panic) - - def _set_alarm(self, status: AceCluster.AlarmStatus) -> None: - """Set the specified alarm status.""" - self.alarm_status = status - self.armed_state = AceCluster.PanelStatus.In_Alarm - self.async_send_signal(f"{self.unique_id}_{SIGNAL_ALARM_TRIGGERED}") - self._send_panel_status_changed() - - def _get_zone_id_map(self): - """Handle the IAS ACE zone id map command.""" - - def _get_zone_info(self, zone_id): - """Handle the IAS ACE zone info command.""" - - def _send_panel_status_response(self) -> None: - """Handle the IAS ACE panel status response command.""" - response = self.panel_status_response( - self.armed_state, - 0x00, - AceCluster.AudibleNotification.Default_Sound, - self.alarm_status, - ) - self._endpoint.device.hass.async_create_task(response) - - def _send_panel_status_changed(self) -> None: - """Handle the IAS ACE panel status changed command.""" - response = self.panel_status_changed( - self.armed_state, - 0x00, - AceCluster.AudibleNotification.Default_Sound, - self.alarm_status, - ) - self._endpoint.device.hass.async_create_task(response) - - def _get_bypassed_zone_list(self): - """Handle the IAS ACE bypassed zone list command.""" - - def _get_zone_status( - self, starting_zone_id, max_zone_ids, zone_status_mask_flag, zone_status_mask - ): - """Handle the IAS ACE zone status command.""" - - -@registries.CLUSTER_HANDLER_ONLY_CLUSTERS.register(IasWd.cluster_id) -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasWd.cluster_id) -class IasWdClusterHandler(ClusterHandler): - """IAS Warning Device cluster handler.""" - - @staticmethod - def set_bit(destination_value, destination_bit, source_value, source_bit): - """Set the specified bit in the value.""" - - if IasWdClusterHandler.get_bit(source_value, source_bit): - return destination_value | (1 << destination_bit) - return destination_value - - @staticmethod - def get_bit(value, bit): - """Get the specified bit from the value.""" - return (value & (1 << bit)) != 0 - - async def issue_squawk( - self, - mode=WARNING_DEVICE_SQUAWK_MODE_ARMED, - strobe=WARNING_DEVICE_STROBE_YES, - squawk_level=WARNING_DEVICE_SOUND_HIGH, - ): - """Issue a squawk command. - - This command uses the WD capabilities to emit a quick audible/visible - pulse called a "squawk". The squawk command has no effect if the WD - is currently active (warning in progress). - """ - value = 0 - value = IasWdClusterHandler.set_bit(value, 0, squawk_level, 0) - value = IasWdClusterHandler.set_bit(value, 1, squawk_level, 1) - - value = IasWdClusterHandler.set_bit(value, 3, strobe, 0) - - value = IasWdClusterHandler.set_bit(value, 4, mode, 0) - value = IasWdClusterHandler.set_bit(value, 5, mode, 1) - value = IasWdClusterHandler.set_bit(value, 6, mode, 2) - value = IasWdClusterHandler.set_bit(value, 7, mode, 3) - - await self.squawk(value) - - async def issue_start_warning( - self, - mode=WARNING_DEVICE_MODE_EMERGENCY, - strobe=WARNING_DEVICE_STROBE_YES, - siren_level=WARNING_DEVICE_SOUND_HIGH, - warning_duration=5, # seconds - strobe_duty_cycle=0x00, - strobe_intensity=WARNING_DEVICE_STROBE_HIGH, - ): - """Issue a start warning command. - - This command starts the WD operation. The WD alerts the surrounding area - by audible (siren) and visual (strobe) signals. - - strobe_duty_cycle indicates the length of the flash cycle. This provides a means - of varying the flash duration for different alarm types (e.g., fire, police, - burglar). 
Valid range is 0-100 in increments of 10. All other values SHALL - be rounded to the nearest valid value. Strobe SHALL calculate duty cycle over - a duration of one second. - - The ON state SHALL precede the OFF state. For example, if Strobe Duty Cycle - Field specifies “40,” then the strobe SHALL flash ON for 4/10ths of a second - and then turn OFF for 6/10ths of a second. - """ - value = 0 - value = IasWdClusterHandler.set_bit(value, 0, siren_level, 0) - value = IasWdClusterHandler.set_bit(value, 1, siren_level, 1) - - value = IasWdClusterHandler.set_bit(value, 2, strobe, 0) - - value = IasWdClusterHandler.set_bit(value, 4, mode, 0) - value = IasWdClusterHandler.set_bit(value, 5, mode, 1) - value = IasWdClusterHandler.set_bit(value, 6, mode, 2) - value = IasWdClusterHandler.set_bit(value, 7, mode, 3) - - await self.start_warning( - value, warning_duration, strobe_duty_cycle, strobe_intensity - ) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(IasZone.cluster_id) -class IASZoneClusterHandler(ClusterHandler): - """Cluster handler for the IASZone Zigbee cluster.""" - - ZCL_INIT_ATTRS = { - IasZone.AttributeDefs.zone_status.name: False, - IasZone.AttributeDefs.zone_state.name: True, - IasZone.AttributeDefs.zone_type.name: True, - } - - @callback - def cluster_command(self, tsn, command_id, args): - """Handle commands received to this cluster.""" - if command_id == IasZone.ClientCommandDefs.status_change_notification.id: - zone_status = args[0] - # update attribute cache with new zone status - self.cluster.update_attribute( - IasZone.AttributeDefs.zone_status.id, zone_status - ) - self.debug("Updated alarm state: %s", zone_status) - elif command_id == IasZone.ClientCommandDefs.enroll.id: - self.debug("Enroll requested") - self._cluster.create_catching_task( - self.enroll_response( - enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 - ) - ) - - async def async_configure(self): - """Configure IAS device.""" - await self.get_attribute_value( - IasZone.AttributeDefs.zone_type.name, from_cache=False - ) - if self._endpoint.device.skip_configuration: - self.debug("skipping IASZoneClusterHandler configuration") - return - - self.debug("started IASZoneClusterHandler configuration") - - await self.bind() - ieee = self.cluster.endpoint.device.application.state.node_info.ieee - - try: - await self.write_attributes_safe( - {IasZone.AttributeDefs.cie_addr.name: ieee} - ) - self.debug( - "wrote cie_addr: %s to '%s' cluster", - str(ieee), - self._cluster.ep_attribute, - ) - except HomeAssistantError as ex: - self.debug( - "Failed to write cie_addr: %s to '%s' cluster: %s", - str(ieee), - self._cluster.ep_attribute, - str(ex), - ) - - self.debug("Sending pro-active IAS enroll response") - self._cluster.create_catching_task( - self.enroll_response( - enroll_response_code=IasZone.EnrollResponse.Success, zone_id=0 - ) - ) - - self._status = ClusterHandlerStatus.CONFIGURED - self.debug("finished IASZoneClusterHandler configuration") - - @callback - def attribute_updated(self, attrid: int, value: Any, _: Any) -> None: - """Handle attribute updates on this cluster.""" - if attrid == IasZone.AttributeDefs.zone_status.id: - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - attrid, - IasZone.AttributeDefs.zone_status.name, - value, - ) diff --git a/homeassistant/components/zha/core/cluster_handlers/smartenergy.py b/homeassistant/components/zha/core/cluster_handlers/smartenergy.py deleted file mode 100644 index d167b8b1752..00000000000 --- 
a/homeassistant/components/zha/core/cluster_handlers/smartenergy.py +++ /dev/null @@ -1,388 +0,0 @@ -"""Smart energy cluster handlers module for Zigbee Home Automation.""" - -from __future__ import annotations - -import enum -from functools import partialmethod -from typing import TYPE_CHECKING - -import zigpy.zcl -from zigpy.zcl.clusters.smartenergy import ( - Calendar, - DeviceManagement, - Drlc, - EnergyManagement, - Events, - KeyEstablishment, - MduPairing, - Messaging, - Metering, - Prepayment, - Price, - Tunneling, -) - -from .. import registries -from ..const import ( - REPORT_CONFIG_ASAP, - REPORT_CONFIG_DEFAULT, - REPORT_CONFIG_OP, - SIGNAL_ATTR_UPDATED, -) -from . import AttrReportConfig, ClusterHandler - -if TYPE_CHECKING: - from ..endpoint import Endpoint - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Calendar.cluster_id) -class CalendarClusterHandler(ClusterHandler): - """Calendar cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(DeviceManagement.cluster_id) -class DeviceManagementClusterHandler(ClusterHandler): - """Device Management cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Drlc.cluster_id) -class DrlcClusterHandler(ClusterHandler): - """Demand Response and Load Control cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(EnergyManagement.cluster_id) -class EnergyManagementClusterHandler(ClusterHandler): - """Energy Management cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Events.cluster_id) -class EventsClusterHandler(ClusterHandler): - """Event cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(KeyEstablishment.cluster_id) -class KeyEstablishmentClusterHandler(ClusterHandler): - """Key Establishment cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(MduPairing.cluster_id) -class MduPairingClusterHandler(ClusterHandler): - """Pairing cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Messaging.cluster_id) -class MessagingClusterHandler(ClusterHandler): - """Messaging cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Metering.cluster_id) -class MeteringClusterHandler(ClusterHandler): - """Metering cluster handler.""" - - REPORT_CONFIG = ( - AttrReportConfig( - attr=Metering.AttributeDefs.instantaneous_demand.name, - config=REPORT_CONFIG_OP, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier1_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier2_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier3_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier4_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier5_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_tier6_summ_delivered.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.current_summ_received.name, - config=REPORT_CONFIG_DEFAULT, - ), - AttrReportConfig( - attr=Metering.AttributeDefs.status.name, - config=REPORT_CONFIG_ASAP, - ), - ) - ZCL_INIT_ATTRS = { - 
Metering.AttributeDefs.demand_formatting.name: True, - Metering.AttributeDefs.divisor.name: True, - Metering.AttributeDefs.metering_device_type.name: True, - Metering.AttributeDefs.multiplier.name: True, - Metering.AttributeDefs.summation_formatting.name: True, - Metering.AttributeDefs.unit_of_measure.name: True, - } - - METERING_DEVICE_TYPES_ELECTRIC = { - 0, - 7, - 8, - 9, - 10, - 11, - 13, - 14, - 15, - 127, - 134, - 135, - 136, - 137, - 138, - 140, - 141, - 142, - } - METERING_DEVICE_TYPES_GAS = {1, 128} - METERING_DEVICE_TYPES_WATER = {2, 129} - METERING_DEVICE_TYPES_HEATING_COOLING = {3, 5, 6, 130, 132, 133} - - metering_device_type = { - 0: "Electric Metering", - 1: "Gas Metering", - 2: "Water Metering", - 3: "Thermal Metering", # deprecated - 4: "Pressure Metering", - 5: "Heat Metering", - 6: "Cooling Metering", - 7: "End Use Measurement Device (EUMD) for metering electric vehicle charging", - 8: "PV Generation Metering", - 9: "Wind Turbine Generation Metering", - 10: "Water Turbine Generation Metering", - 11: "Micro Generation Metering", - 12: "Solar Hot Water Generation Metering", - 13: "Electric Metering Element/Phase 1", - 14: "Electric Metering Element/Phase 2", - 15: "Electric Metering Element/Phase 3", - 127: "Mirrored Electric Metering", - 128: "Mirrored Gas Metering", - 129: "Mirrored Water Metering", - 130: "Mirrored Thermal Metering", # deprecated - 131: "Mirrored Pressure Metering", - 132: "Mirrored Heat Metering", - 133: "Mirrored Cooling Metering", - 134: "Mirrored End Use Measurement Device (EUMD) for metering electric vehicle charging", - 135: "Mirrored PV Generation Metering", - 136: "Mirrored Wind Turbine Generation Metering", - 137: "Mirrored Water Turbine Generation Metering", - 138: "Mirrored Micro Generation Metering", - 139: "Mirrored Solar Hot Water Generation Metering", - 140: "Mirrored Electric Metering Element/Phase 1", - 141: "Mirrored Electric Metering Element/Phase 2", - 142: "Mirrored Electric Metering Element/Phase 3", - } - - class DeviceStatusElectric(enum.IntFlag): - """Electric Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - POWER_FAILURE = 8 - POWER_QUALITY = 16 - LEAK_DETECT = 32 # Really? 
- SERVICE_DISCONNECT = 64 - RESERVED = 128 - - class DeviceStatusGas(enum.IntFlag): - """Gas Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - NOT_DEFINED = 8 - LOW_PRESSURE = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusWater(enum.IntFlag): - """Water Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - PIPE_EMPTY = 8 - LOW_PRESSURE = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusHeatingCooling(enum.IntFlag): - """Heating and Cooling Metering Device Status.""" - - NO_ALARMS = 0 - CHECK_METER = 1 - LOW_BATTERY = 2 - TAMPER_DETECT = 4 - TEMPERATURE_SENSOR = 8 - BURST_DETECT = 16 - LEAK_DETECT = 32 - SERVICE_DISCONNECT = 64 - REVERSE_FLOW = 128 - - class DeviceStatusDefault(enum.IntFlag): - """Metering Device Status.""" - - NO_ALARMS = 0 - - class FormatSelector(enum.IntEnum): - """Format specified selector.""" - - DEMAND = 0 - SUMMATION = 1 - - def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None: - """Initialize Metering.""" - super().__init__(cluster, endpoint) - self._format_spec: str | None = None - self._summa_format: str | None = None - - @property - def divisor(self) -> int: - """Return divisor for the value.""" - return self.cluster.get(Metering.AttributeDefs.divisor.name) or 1 - - @property - def device_type(self) -> str | int | None: - """Return metering device type.""" - dev_type = self.cluster.get(Metering.AttributeDefs.metering_device_type.name) - if dev_type is None: - return None - return self.metering_device_type.get(dev_type, dev_type) - - @property - def multiplier(self) -> int: - """Return multiplier for the value.""" - return self.cluster.get(Metering.AttributeDefs.multiplier.name) or 1 - - @property - def status(self) -> int | None: - """Return metering device status.""" - if (status := self.cluster.get(Metering.AttributeDefs.status.name)) is None: - return None - - metering_device_type = self.cluster.get( - Metering.AttributeDefs.metering_device_type.name - ) - if metering_device_type in self.METERING_DEVICE_TYPES_ELECTRIC: - return self.DeviceStatusElectric(status) - if metering_device_type in self.METERING_DEVICE_TYPES_GAS: - return self.DeviceStatusGas(status) - if metering_device_type in self.METERING_DEVICE_TYPES_WATER: - return self.DeviceStatusWater(status) - if metering_device_type in self.METERING_DEVICE_TYPES_HEATING_COOLING: - return self.DeviceStatusHeatingCooling(status) - return self.DeviceStatusDefault(status) - - @property - def unit_of_measurement(self) -> int: - """Return unit of measurement.""" - return self.cluster.get(Metering.AttributeDefs.unit_of_measure.name) - - async def async_initialize_cluster_handler_specific(self, from_cache: bool) -> None: - """Fetch config from device and updates format specifier.""" - - fmting = self.cluster.get( - Metering.AttributeDefs.demand_formatting.name, 0xF9 - ) # 1 digit to the right, 15 digits to the left - self._format_spec = self.get_formatting(fmting) - - fmting = self.cluster.get( - Metering.AttributeDefs.summation_formatting.name, 0xF9 - ) # 1 digit to the right, 15 digits to the left - self._summa_format = self.get_formatting(fmting) - - async def async_update(self) -> None: - """Retrieve latest state.""" - self.debug("async_update") - - attrs = [ - a["attr"] - for a in self.REPORT_CONFIG - if a["attr"] not in self.cluster.unsupported_attributes - ] - result = await self.get_attributes(attrs, 
from_cache=False, only_cache=False) - if result: - for attr, value in result.items(): - self.async_send_signal( - f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", - self.cluster.find_attribute(attr).id, - attr, - value, - ) - - @staticmethod - def get_formatting(formatting: int) -> str: - """Return a formatting string, given the formatting value. - - Bits 0 to 2: Number of Digits to the right of the Decimal Point. - Bits 3 to 6: Number of Digits to the left of the Decimal Point. - Bit 7: If set, suppress leading zeros. - """ - r_digits = int(formatting & 0x07) # digits to the right of decimal point - l_digits = (formatting >> 3) & 0x0F # digits to the left of decimal point - if l_digits == 0: - l_digits = 15 - width = r_digits + l_digits + (1 if r_digits > 0 else 0) - - if formatting & 0x80: - # suppress leading 0 - return f"{{:{width}.{r_digits}f}}" - - return f"{{:0{width}.{r_digits}f}}" - - def _formatter_function( - self, selector: FormatSelector, value: int - ) -> int | float | str: - """Return formatted value for display.""" - value_float = value * self.multiplier / self.divisor - if self.unit_of_measurement == 0: - # Zigbee spec power unit is kW, but we show the value in W - value_watt = value_float * 1000 - if value_watt < 100: - return round(value_watt, 1) - return round(value_watt) - if selector == self.FormatSelector.SUMMATION: - assert self._summa_format - return self._summa_format.format(value_float).lstrip() - assert self._format_spec - return self._format_spec.format(value_float).lstrip() - - demand_formatter = partialmethod(_formatter_function, FormatSelector.DEMAND) - summa_formatter = partialmethod(_formatter_function, FormatSelector.SUMMATION) - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Prepayment.cluster_id) -class PrepaymentClusterHandler(ClusterHandler): - """Prepayment cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Price.cluster_id) -class PriceClusterHandler(ClusterHandler): - """Price cluster handler.""" - - -@registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.register(Tunneling.cluster_id) -class TunnelingClusterHandler(ClusterHandler): - """Tunneling cluster handler.""" diff --git a/homeassistant/components/zha/core/const.py b/homeassistant/components/zha/core/const.py deleted file mode 100644 index 2359fe0a1c3..00000000000 --- a/homeassistant/components/zha/core/const.py +++ /dev/null @@ -1,423 +0,0 @@ -"""All constants related to the ZHA component.""" - -from __future__ import annotations - -import enum -import logging - -import bellows.zigbee.application -import voluptuous as vol -import zigpy.application -import zigpy.types as t -import zigpy_deconz.zigbee.application -import zigpy_xbee.zigbee.application -import zigpy_zigate.zigbee.application -import zigpy_znp.zigbee.application - -from homeassistant.const import Platform -import homeassistant.helpers.config_validation as cv - -ATTR_ACTIVE_COORDINATOR = "active_coordinator" -ATTR_ARGS = "args" -ATTR_ATTRIBUTE = "attribute" -ATTR_ATTRIBUTE_ID = "attribute_id" -ATTR_ATTRIBUTE_NAME = "attribute_name" -ATTR_AVAILABLE = "available" -ATTR_CLUSTER_ID = "cluster_id" -ATTR_CLUSTER_TYPE = "cluster_type" -ATTR_COMMAND_TYPE = "command_type" -ATTR_DEVICE_IEEE = "device_ieee" -ATTR_DEVICE_TYPE = "device_type" -ATTR_ENDPOINTS = "endpoints" -ATTR_ENDPOINT_NAMES = "endpoint_names" -ATTR_ENDPOINT_ID = "endpoint_id" -ATTR_IEEE = "ieee" -ATTR_IN_CLUSTERS = "in_clusters" -ATTR_LAST_SEEN = "last_seen" -ATTR_LEVEL = "level" -ATTR_LQI = "lqi" -ATTR_MANUFACTURER = "manufacturer" 
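The Metering handler removed above derives its display format from the ZCL demand/summation formatting byte (bits 0 to 2: digits right of the decimal point, bits 3 to 6: digits left, bit 7: suppress leading zeros) and scales raw readings by multiplier/divisor. A minimal, self-contained sketch of that decoding follows; it mirrors the deleted helpers, and the sample reading, multiplier, and divisor are made-up illustration values, not data from a device.

def get_formatting(formatting: int) -> str:
    """Build a str.format spec from a ZCL formatting byte (as in the deleted handler)."""
    r_digits = formatting & 0x07          # bits 0-2: digits right of the decimal point
    l_digits = (formatting >> 3) & 0x0F   # bits 3-6: digits left of the decimal point
    if l_digits == 0:
        l_digits = 15
    width = r_digits + l_digits + (1 if r_digits > 0 else 0)
    if formatting & 0x80:                 # bit 7: suppress leading zeros
        return f"{{:{width}.{r_digits}f}}"
    return f"{{:0{width}.{r_digits}f}}"


def format_reading(raw: int, multiplier: int, divisor: int, spec: str) -> str:
    """Scale a raw summation/demand value and render it for display."""
    return spec.format(raw * multiplier / divisor).lstrip()


spec = get_formatting(0xF9)   # the 0xF9 default: 15 digits left, 1 digit right
assert spec == "{:17.1f}"
assert format_reading(12345, 1, 1000, spec) == "12.3"   # illustrative values only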
-ATTR_MANUFACTURER_CODE = "manufacturer_code" -ATTR_MEMBERS = "members" -ATTR_MODEL = "model" -ATTR_NEIGHBORS = "neighbors" -ATTR_NODE_DESCRIPTOR = "node_descriptor" -ATTR_NWK = "nwk" -ATTR_OUT_CLUSTERS = "out_clusters" -ATTR_PARAMS = "params" -ATTR_POWER_SOURCE = "power_source" -ATTR_PROFILE_ID = "profile_id" -ATTR_QUIRK_APPLIED = "quirk_applied" -ATTR_QUIRK_CLASS = "quirk_class" -ATTR_QUIRK_ID = "quirk_id" -ATTR_ROUTES = "routes" -ATTR_RSSI = "rssi" -ATTR_SIGNATURE = "signature" -ATTR_TYPE = "type" -ATTR_UNIQUE_ID = "unique_id" -ATTR_VALUE = "value" -ATTR_WARNING_DEVICE_DURATION = "duration" -ATTR_WARNING_DEVICE_MODE = "mode" -ATTR_WARNING_DEVICE_STROBE = "strobe" -ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE = "duty_cycle" -ATTR_WARNING_DEVICE_STROBE_INTENSITY = "intensity" - -BAUD_RATES = [2400, 4800, 9600, 14400, 19200, 38400, 57600, 115200, 128000, 256000] -BINDINGS = "bindings" - -CLUSTER_DETAILS = "cluster_details" - -CLUSTER_HANDLER_ACCELEROMETER = "accelerometer" -CLUSTER_HANDLER_BINARY_INPUT = "binary_input" -CLUSTER_HANDLER_ANALOG_INPUT = "analog_input" -CLUSTER_HANDLER_ANALOG_OUTPUT = "analog_output" -CLUSTER_HANDLER_ATTRIBUTE = "attribute" -CLUSTER_HANDLER_BASIC = "basic" -CLUSTER_HANDLER_COLOR = "light_color" -CLUSTER_HANDLER_COVER = "window_covering" -CLUSTER_HANDLER_DEVICE_TEMPERATURE = "device_temperature" -CLUSTER_HANDLER_DOORLOCK = "door_lock" -CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT = "electrical_measurement" -CLUSTER_HANDLER_EVENT_RELAY = "event_relay" -CLUSTER_HANDLER_FAN = "fan" -CLUSTER_HANDLER_HUMIDITY = "humidity" -CLUSTER_HANDLER_HUE_OCCUPANCY = "philips_occupancy" -CLUSTER_HANDLER_SOIL_MOISTURE = "soil_moisture" -CLUSTER_HANDLER_LEAF_WETNESS = "leaf_wetness" -CLUSTER_HANDLER_IAS_ACE = "ias_ace" -CLUSTER_HANDLER_IAS_WD = "ias_wd" -CLUSTER_HANDLER_IDENTIFY = "identify" -CLUSTER_HANDLER_ILLUMINANCE = "illuminance" -CLUSTER_HANDLER_LEVEL = ATTR_LEVEL -CLUSTER_HANDLER_MULTISTATE_INPUT = "multistate_input" -CLUSTER_HANDLER_OCCUPANCY = "occupancy" -CLUSTER_HANDLER_ON_OFF = "on_off" -CLUSTER_HANDLER_OTA = "ota" -CLUSTER_HANDLER_POWER_CONFIGURATION = "power" -CLUSTER_HANDLER_PRESSURE = "pressure" -CLUSTER_HANDLER_SHADE = "shade" -CLUSTER_HANDLER_SMARTENERGY_METERING = "smartenergy_metering" -CLUSTER_HANDLER_TEMPERATURE = "temperature" -CLUSTER_HANDLER_THERMOSTAT = "thermostat" -CLUSTER_HANDLER_ZDO = "zdo" -CLUSTER_HANDLER_ZONE = ZONE = "ias_zone" -CLUSTER_HANDLER_INOVELLI = "inovelli_vzm31sn_cluster" - -CLUSTER_COMMAND_SERVER = "server" -CLUSTER_COMMANDS_CLIENT = "client_commands" -CLUSTER_COMMANDS_SERVER = "server_commands" -CLUSTER_TYPE_IN = "in" -CLUSTER_TYPE_OUT = "out" - -PLATFORMS = ( - Platform.ALARM_CONTROL_PANEL, - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.CLIMATE, - Platform.COVER, - Platform.DEVICE_TRACKER, - Platform.FAN, - Platform.LIGHT, - Platform.LOCK, - Platform.NUMBER, - Platform.SELECT, - Platform.SENSOR, - Platform.SIREN, - Platform.SWITCH, - Platform.UPDATE, -) - -CONF_ALARM_MASTER_CODE = "alarm_master_code" -CONF_ALARM_FAILED_TRIES = "alarm_failed_tries" -CONF_ALARM_ARM_REQUIRES_CODE = "alarm_arm_requires_code" - -CONF_BAUDRATE = "baudrate" -CONF_FLOW_CONTROL = "flow_control" -CONF_CUSTOM_QUIRKS_PATH = "custom_quirks_path" -CONF_DEFAULT_LIGHT_TRANSITION = "default_light_transition" -CONF_DEVICE_CONFIG = "device_config" -CONF_ENABLE_ENHANCED_LIGHT_TRANSITION = "enhanced_light_transition" -CONF_ENABLE_LIGHT_TRANSITIONING_FLAG = "light_transitioning_flag" -CONF_ALWAYS_PREFER_XY_COLOR_MODE = "always_prefer_xy_color_mode" 
-CONF_GROUP_MEMBERS_ASSUME_STATE = "group_members_assume_state" -CONF_ENABLE_IDENTIFY_ON_JOIN = "enable_identify_on_join" -CONF_ENABLE_QUIRKS = "enable_quirks" -CONF_RADIO_TYPE = "radio_type" -CONF_USB_PATH = "usb_path" -CONF_USE_THREAD = "use_thread" -CONF_ZIGPY = "zigpy_config" - -CONF_CONSIDER_UNAVAILABLE_MAINS = "consider_unavailable_mains" -CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS = 60 * 60 * 2 # 2 hours -CONF_CONSIDER_UNAVAILABLE_BATTERY = "consider_unavailable_battery" -CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY = 60 * 60 * 6 # 6 hours - -CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( - { - vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( - vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) - ), - vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, - vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, - vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, - vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, - vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, - vol.Optional( - CONF_CONSIDER_UNAVAILABLE_MAINS, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - ): cv.positive_int, - vol.Optional( - CONF_CONSIDER_UNAVAILABLE_BATTERY, - default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - ): cv.positive_int, - } -) - -CONF_ZHA_ALARM_SCHEMA = vol.Schema( - { - vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, - vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, - vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, - } -) - -CUSTOM_CONFIGURATION = "custom_configuration" - -DATA_DEVICE_CONFIG = "zha_device_config" -DATA_ZHA = "zha" -DATA_ZHA_CONFIG = "config" -DATA_ZHA_CORE_EVENTS = "zha_core_events" -DATA_ZHA_DEVICE_TRIGGER_CACHE = "zha_device_trigger_cache" -DATA_ZHA_GATEWAY = "zha_gateway" - -DEBUG_COMP_BELLOWS = "bellows" -DEBUG_COMP_ZHA = "homeassistant.components.zha" -DEBUG_COMP_ZIGPY = "zigpy" -DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" -DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" -DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" -DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" -DEBUG_LEVEL_CURRENT = "current" -DEBUG_LEVEL_ORIGINAL = "original" -DEBUG_LEVELS = { - DEBUG_COMP_BELLOWS: logging.DEBUG, - DEBUG_COMP_ZHA: logging.DEBUG, - DEBUG_COMP_ZIGPY: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, - DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, - DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, - DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, -} -DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY] - -DEFAULT_RADIO_TYPE = "ezsp" -DEFAULT_BAUDRATE = 57600 -DEFAULT_DATABASE_NAME = "zigbee.db" - -DEVICE_PAIRING_STATUS = "pairing_status" - -DISCOVERY_KEY = "zha_discovery_info" - -DOMAIN = "zha" - -ENTITY_METADATA = "entity_metadata" - -GROUP_ID = "group_id" -GROUP_IDS = "group_ids" -GROUP_NAME = "group_name" - -MFG_CLUSTER_ID_START = 0xFC00 - -POWER_MAINS_POWERED = "Mains" -POWER_BATTERY_OR_UNKNOWN = "Battery or Unknown" - -PRESET_SCHEDULE = "Schedule" -PRESET_COMPLEX = "Complex" -PRESET_TEMP_MANUAL = "Temporary manual" - -ZCL_INIT_ATTRS = "ZCL_INIT_ATTRS" - -ZHA_ALARM_OPTIONS = "zha_alarm_options" -ZHA_OPTIONS = "zha_options" - -ZHA_CONFIG_SCHEMAS = { - ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, - ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, -} - -type _ControllerClsType = type[zigpy.application.ControllerApplication] - - -class RadioType(enum.Enum): - """Possible options for radio type.""" - - ezsp = ( - "EZSP = Silicon Labs EmberZNet protocol: Elelabs, HUSBZB-1, 
Telegesis", - bellows.zigbee.application.ControllerApplication, - ) - znp = ( - "ZNP = Texas Instruments Z-Stack ZNP protocol: CC253x, CC26x2, CC13x2", - zigpy_znp.zigbee.application.ControllerApplication, - ) - deconz = ( - "deCONZ = dresden elektronik deCONZ protocol: ConBee I/II, RaspBee I/II", - zigpy_deconz.zigbee.application.ControllerApplication, - ) - zigate = ( - "ZiGate = ZiGate Zigbee radios: PiZiGate, ZiGate USB-TTL, ZiGate WiFi", - zigpy_zigate.zigbee.application.ControllerApplication, - ) - xbee = ( - "XBee = Digi XBee Zigbee radios: Digi XBee Series 2, 2C, 3", - zigpy_xbee.zigbee.application.ControllerApplication, - ) - - @classmethod - def list(cls) -> list[str]: - """Return a list of descriptions.""" - return [e.description for e in RadioType] - - @classmethod - def get_by_description(cls, description: str) -> RadioType: - """Get radio by description.""" - for radio in cls: - if radio.description == description: - return radio - raise ValueError - - def __init__(self, description: str, controller_cls: _ControllerClsType) -> None: - """Init instance.""" - self._desc = description - self._ctrl_cls = controller_cls - - @property - def controller(self) -> _ControllerClsType: - """Return controller class.""" - return self._ctrl_cls - - @property - def description(self) -> str: - """Return radio type description.""" - return self._desc - - -REPORT_CONFIG_ATTR_PER_REQ = 3 -REPORT_CONFIG_MAX_INT = 900 -REPORT_CONFIG_MAX_INT_BATTERY_SAVE = 10800 -REPORT_CONFIG_MIN_INT = 30 -REPORT_CONFIG_MIN_INT_ASAP = 1 -REPORT_CONFIG_MIN_INT_IMMEDIATE = 0 -REPORT_CONFIG_MIN_INT_OP = 5 -REPORT_CONFIG_MIN_INT_BATTERY_SAVE = 3600 -REPORT_CONFIG_RPT_CHANGE = 1 -REPORT_CONFIG_DEFAULT = ( - REPORT_CONFIG_MIN_INT, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_ASAP = ( - REPORT_CONFIG_MIN_INT_ASAP, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_BATTERY_SAVE = ( - REPORT_CONFIG_MIN_INT_BATTERY_SAVE, - REPORT_CONFIG_MAX_INT_BATTERY_SAVE, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_IMMEDIATE = ( - REPORT_CONFIG_MIN_INT_IMMEDIATE, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) -REPORT_CONFIG_OP = ( - REPORT_CONFIG_MIN_INT_OP, - REPORT_CONFIG_MAX_INT, - REPORT_CONFIG_RPT_CHANGE, -) - -SENSOR_ACCELERATION = "acceleration" -SENSOR_BATTERY = "battery" -SENSOR_ELECTRICAL_MEASUREMENT = CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT -SENSOR_GENERIC = "generic" -SENSOR_HUMIDITY = CLUSTER_HANDLER_HUMIDITY -SENSOR_ILLUMINANCE = CLUSTER_HANDLER_ILLUMINANCE -SENSOR_METERING = "metering" -SENSOR_OCCUPANCY = CLUSTER_HANDLER_OCCUPANCY -SENSOR_OPENING = "opening" -SENSOR_PRESSURE = CLUSTER_HANDLER_PRESSURE -SENSOR_TEMPERATURE = CLUSTER_HANDLER_TEMPERATURE -SENSOR_TYPE = "sensor_type" - -SIGNAL_ADD_ENTITIES = "zha_add_new_entities" -SIGNAL_ATTR_UPDATED = "attribute_updated" -SIGNAL_AVAILABLE = "available" -SIGNAL_MOVE_LEVEL = "move_level" -SIGNAL_REMOVE = "remove" -SIGNAL_SET_LEVEL = "set_level" -SIGNAL_STATE_ATTR = "update_state_attribute" -SIGNAL_UPDATE_DEVICE = "{}_zha_update_device" -SIGNAL_GROUP_ENTITY_REMOVED = "group_entity_removed" -SIGNAL_GROUP_MEMBERSHIP_CHANGE = "group_membership_change" - -UNKNOWN = "unknown" -UNKNOWN_MANUFACTURER = "unk_manufacturer" -UNKNOWN_MODEL = "unk_model" - -WARNING_DEVICE_MODE_STOP = 0 -WARNING_DEVICE_MODE_BURGLAR = 1 -WARNING_DEVICE_MODE_FIRE = 2 -WARNING_DEVICE_MODE_EMERGENCY = 3 -WARNING_DEVICE_MODE_POLICE_PANIC = 4 -WARNING_DEVICE_MODE_FIRE_PANIC = 5 -WARNING_DEVICE_MODE_EMERGENCY_PANIC = 6 - -WARNING_DEVICE_STROBE_NO = 0 
-WARNING_DEVICE_STROBE_YES = 1 - -WARNING_DEVICE_SOUND_LOW = 0 -WARNING_DEVICE_SOUND_MEDIUM = 1 -WARNING_DEVICE_SOUND_HIGH = 2 -WARNING_DEVICE_SOUND_VERY_HIGH = 3 - -WARNING_DEVICE_STROBE_LOW = 0x00 -WARNING_DEVICE_STROBE_MEDIUM = 0x01 -WARNING_DEVICE_STROBE_HIGH = 0x02 -WARNING_DEVICE_STROBE_VERY_HIGH = 0x03 - -WARNING_DEVICE_SQUAWK_MODE_ARMED = 0 -WARNING_DEVICE_SQUAWK_MODE_DISARMED = 1 - -ZHA_DISCOVERY_NEW = "zha_discovery_new_{}" -ZHA_CLUSTER_HANDLER_MSG = "zha_channel_message" -ZHA_CLUSTER_HANDLER_MSG_BIND = "zha_channel_bind" -ZHA_CLUSTER_HANDLER_MSG_CFG_RPT = "zha_channel_configure_reporting" -ZHA_CLUSTER_HANDLER_MSG_DATA = "zha_channel_msg_data" -ZHA_CLUSTER_HANDLER_CFG_DONE = "zha_channel_cfg_done" -ZHA_CLUSTER_HANDLER_READS_PER_REQ = 5 -ZHA_EVENT = "zha_event" -ZHA_GW_MSG = "zha_gateway_message" -ZHA_GW_MSG_DEVICE_FULL_INIT = "device_fully_initialized" -ZHA_GW_MSG_DEVICE_INFO = "device_info" -ZHA_GW_MSG_DEVICE_JOINED = "device_joined" -ZHA_GW_MSG_DEVICE_REMOVED = "device_removed" -ZHA_GW_MSG_GROUP_ADDED = "group_added" -ZHA_GW_MSG_GROUP_INFO = "group_info" -ZHA_GW_MSG_GROUP_MEMBER_ADDED = "group_member_added" -ZHA_GW_MSG_GROUP_MEMBER_REMOVED = "group_member_removed" -ZHA_GW_MSG_GROUP_REMOVED = "group_removed" -ZHA_GW_MSG_LOG_ENTRY = "log_entry" -ZHA_GW_MSG_LOG_OUTPUT = "log_output" -ZHA_GW_MSG_RAW_INIT = "raw_device_initialized" - - -class Strobe(t.enum8): - """Strobe enum.""" - - No_Strobe = 0x00 - Strobe = 0x01 - - -EZSP_OVERWRITE_EUI64 = ( - "i_understand_i_can_update_eui64_only_once_and_i_still_want_to_do_it" -) diff --git a/homeassistant/components/zha/core/decorators.py b/homeassistant/components/zha/core/decorators.py deleted file mode 100644 index d20fb7f2a38..00000000000 --- a/homeassistant/components/zha/core/decorators.py +++ /dev/null @@ -1,56 +0,0 @@ -"""Decorators for ZHA core registries.""" - -from __future__ import annotations - -from collections.abc import Callable -from typing import Any - - -class DictRegistry[_TypeT: type[Any]](dict[int | str, _TypeT]): - """Dict Registry of items.""" - - def register(self, name: int | str) -> Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - self[name] = cluster_handler - return cluster_handler - - return decorator - - -class NestedDictRegistry[_TypeT: type[Any]]( - dict[int | str, dict[int | str | None, _TypeT]] -): - """Dict Registry of multiple items per key.""" - - def register( - self, name: int | str, sub_name: int | str | None = None - ) -> Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific and a quirk name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - if name not in self: - self[name] = {} - self[name][sub_name] = cluster_handler - return cluster_handler - - return decorator - - -class SetRegistry(set[int | str]): - """Set Registry of items.""" - - def register[_TypeT: type[Any]]( - self, name: int | str - ) -> Callable[[_TypeT], _TypeT]: - """Return decorator to register item with a specific name.""" - - def decorator(cluster_handler: _TypeT) -> _TypeT: - """Register decorated cluster handler or item.""" - self.add(name) - return cluster_handler - - return decorator diff --git a/homeassistant/components/zha/core/device.py b/homeassistant/components/zha/core/device.py deleted file mode 100644 index 163674d614c..00000000000 --- a/homeassistant/components/zha/core/device.py 
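The registries in the deleted decorators.py are filled declaratively: register() returns a decorator that stores the decorated handler class under a cluster id (the nested variant additionally keys by quirk id). A simplified, self-contained sketch of that pattern; the registry name and handler class below are illustrative stand-ins, not the real ZHA objects.

from collections.abc import Callable


class DictRegistry(dict):
    """Simplified version of the deleted DictRegistry (without PEP 695 generics)."""

    def register(self, name: int | str) -> Callable[[type], type]:
        def decorator(item: type) -> type:
            self[name] = item          # record the class under its cluster id
            return item

        return decorator


CLUSTER_HANDLER_REGISTRY = DictRegistry()


@CLUSTER_HANDLER_REGISTRY.register(0x0702)  # Smart Energy Metering cluster id
class DemoMeteringHandler:
    """Looked up later by cluster id when endpoints are probed."""


assert CLUSTER_HANDLER_REGISTRY[0x0702] is DemoMeteringHandler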
+++ /dev/null @@ -1,1010 +0,0 @@ -"""Device for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -from collections.abc import Callable -from datetime import timedelta -from enum import Enum -from functools import cached_property -import logging -import random -import time -from typing import TYPE_CHECKING, Any, Self - -from zigpy import types -from zigpy.device import Device as ZigpyDevice -import zigpy.exceptions -from zigpy.profiles import PROFILES -import zigpy.quirks -from zigpy.quirks.v2 import CustomDeviceV2 -from zigpy.types.named import EUI64, NWK -from zigpy.zcl.clusters import Cluster -from zigpy.zcl.clusters.general import Groups, Identify -from zigpy.zcl.foundation import Status as ZclStatus, ZCLCommandDef -import zigpy.zdo.types as zdo_types - -from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID, ATTR_NAME -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.event import async_track_time_interval - -from . import const, discovery -from .cluster_handlers import ClusterHandler, ZDOClusterHandler -from .const import ( - ATTR_ACTIVE_COORDINATOR, - ATTR_ARGS, - ATTR_ATTRIBUTE, - ATTR_AVAILABLE, - ATTR_CLUSTER_ID, - ATTR_CLUSTER_TYPE, - ATTR_COMMAND_TYPE, - ATTR_DEVICE_TYPE, - ATTR_ENDPOINT_ID, - ATTR_ENDPOINT_NAMES, - ATTR_ENDPOINTS, - ATTR_IEEE, - ATTR_LAST_SEEN, - ATTR_LQI, - ATTR_MANUFACTURER, - ATTR_MANUFACTURER_CODE, - ATTR_MODEL, - ATTR_NEIGHBORS, - ATTR_NODE_DESCRIPTOR, - ATTR_NWK, - ATTR_PARAMS, - ATTR_POWER_SOURCE, - ATTR_QUIRK_APPLIED, - ATTR_QUIRK_CLASS, - ATTR_QUIRK_ID, - ATTR_ROUTES, - ATTR_RSSI, - ATTR_SIGNATURE, - ATTR_VALUE, - CLUSTER_COMMAND_SERVER, - CLUSTER_COMMANDS_CLIENT, - CLUSTER_COMMANDS_SERVER, - CLUSTER_TYPE_IN, - CLUSTER_TYPE_OUT, - CONF_CONSIDER_UNAVAILABLE_BATTERY, - CONF_CONSIDER_UNAVAILABLE_MAINS, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - CONF_ENABLE_IDENTIFY_ON_JOIN, - POWER_BATTERY_OR_UNKNOWN, - POWER_MAINS_POWERED, - SIGNAL_AVAILABLE, - SIGNAL_UPDATE_DEVICE, - UNKNOWN, - UNKNOWN_MANUFACTURER, - UNKNOWN_MODEL, - ZHA_OPTIONS, -) -from .endpoint import Endpoint -from .helpers import LogMixin, async_get_zha_config_value, convert_to_zcl_values - -if TYPE_CHECKING: - from ..websocket_api import ClusterBinding - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) -_UPDATE_ALIVE_INTERVAL = (60, 90) -_CHECKIN_GRACE_PERIODS = 2 - - -def get_device_automation_triggers( - device: zigpy.device.Device, -) -> dict[tuple[str, str], dict[str, str]]: - """Get the supported device automation triggers for a zigpy device.""" - return { - ("device_offline", "device_offline"): {"device_event_type": "device_offline"}, - **getattr(device, "device_automation_triggers", {}), - } - - -class DeviceStatus(Enum): - """Status of a device.""" - - CREATED = 1 - INITIALIZED = 2 - - -class ZHADevice(LogMixin): - """ZHA Zigbee device object.""" - - _ha_device_id: str - - def __init__( - self, - hass: HomeAssistant, - zigpy_device: zigpy.device.Device, - zha_gateway: ZHAGateway, - ) -> None: - """Initialize the gateway.""" - self.hass: HomeAssistant = hass - self._zigpy_device: ZigpyDevice = zigpy_device - self._zha_gateway: ZHAGateway = zha_gateway - 
self._available_signal: str = f"{self.name}_{self.ieee}_{SIGNAL_AVAILABLE}" - self._checkins_missed_count: int = 0 - self.unsubs: list[Callable[[], None]] = [] - self.quirk_applied: bool = isinstance( - self._zigpy_device, zigpy.quirks.CustomDevice - ) - self.quirk_class: str = ( - f"{self._zigpy_device.__class__.__module__}." - f"{self._zigpy_device.__class__.__name__}" - ) - self.quirk_id: str | None = getattr(self._zigpy_device, ATTR_QUIRK_ID, None) - - if self.is_mains_powered: - self.consider_unavailable_time: int = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_CONSIDER_UNAVAILABLE_MAINS, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, - ) - else: - self.consider_unavailable_time = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - ) - self._available: bool = self.is_coordinator or ( - self.last_seen is not None - and time.time() - self.last_seen < self.consider_unavailable_time - ) - self._zdo_handler: ZDOClusterHandler = ZDOClusterHandler(self) - self._power_config_ch: ClusterHandler | None = None - self._identify_ch: ClusterHandler | None = None - self._basic_ch: ClusterHandler | None = None - self.status: DeviceStatus = DeviceStatus.CREATED - - self._endpoints: dict[int, Endpoint] = {} - for ep_id, endpoint in zigpy_device.endpoints.items(): - if ep_id != 0: - self._endpoints[ep_id] = Endpoint.new(endpoint, self) - - if not self.is_coordinator: - keep_alive_interval = random.randint(*_UPDATE_ALIVE_INTERVAL) - self.debug( - "starting availability checks - interval: %s", keep_alive_interval - ) - self.unsubs.append( - async_track_time_interval( - self.hass, - self._check_available, - timedelta(seconds=keep_alive_interval), - ) - ) - - @property - def device_id(self) -> str: - """Return the HA device registry device id.""" - return self._ha_device_id - - def set_device_id(self, device_id: str) -> None: - """Set the HA device registry device id.""" - self._ha_device_id = device_id - - @property - def device(self) -> zigpy.device.Device: - """Return underlying Zigpy device.""" - return self._zigpy_device - - @property - def name(self) -> str: - """Return device name.""" - return f"{self.manufacturer} {self.model}" - - @property - def ieee(self) -> EUI64: - """Return ieee address for device.""" - return self._zigpy_device.ieee - - @property - def manufacturer(self) -> str: - """Return manufacturer for device.""" - if self._zigpy_device.manufacturer is None: - return UNKNOWN_MANUFACTURER - return self._zigpy_device.manufacturer - - @property - def model(self) -> str: - """Return model for device.""" - if self._zigpy_device.model is None: - return UNKNOWN_MODEL - return self._zigpy_device.model - - @property - def manufacturer_code(self) -> int | None: - """Return the manufacturer code for the device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.manufacturer_code - - @property - def nwk(self) -> NWK: - """Return nwk for device.""" - return self._zigpy_device.nwk - - @property - def lqi(self): - """Return lqi for device.""" - return self._zigpy_device.lqi - - @property - def rssi(self): - """Return rssi for device.""" - return self._zigpy_device.rssi - - @property - def last_seen(self) -> float | None: - """Return last_seen for device.""" - return self._zigpy_device.last_seen - - @property - def is_mains_powered(self) -> bool | None: - """Return true if device is mains powered.""" - if 
self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_mains_powered - - @property - def device_type(self) -> str: - """Return the logical device type for the device.""" - if self._zigpy_device.node_desc is None: - return UNKNOWN - - return self._zigpy_device.node_desc.logical_type.name - - @property - def power_source(self) -> str: - """Return the power source for the device.""" - return ( - POWER_MAINS_POWERED if self.is_mains_powered else POWER_BATTERY_OR_UNKNOWN - ) - - @property - def is_router(self) -> bool | None: - """Return true if this is a routing capable device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_router - - @property - def is_coordinator(self) -> bool | None: - """Return true if this device represents a coordinator.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_coordinator - - @property - def is_active_coordinator(self) -> bool: - """Return true if this device is the active coordinator.""" - if not self.is_coordinator: - return False - - return self.ieee == self.gateway.state.node_info.ieee - - @property - def is_end_device(self) -> bool | None: - """Return true if this device is an end device.""" - if self._zigpy_device.node_desc is None: - return None - - return self._zigpy_device.node_desc.is_end_device - - @property - def is_groupable(self) -> bool: - """Return true if this device has a group cluster.""" - return self.is_coordinator or ( - self.available and bool(self.async_get_groupable_endpoints()) - ) - - @property - def skip_configuration(self) -> bool: - """Return true if the device should not issue configuration related commands.""" - return self._zigpy_device.skip_configuration or bool(self.is_coordinator) - - @property - def gateway(self): - """Return the gateway for this device.""" - return self._zha_gateway - - @cached_property - def device_automation_commands(self) -> dict[str, list[tuple[str, str]]]: - """Return the a lookup of commands to etype/sub_type.""" - commands: dict[str, list[tuple[str, str]]] = {} - for etype_subtype, trigger in self.device_automation_triggers.items(): - if command := trigger.get(ATTR_COMMAND): - commands.setdefault(command, []).append(etype_subtype) - return commands - - @cached_property - def device_automation_triggers(self) -> dict[tuple[str, str], dict[str, str]]: - """Return the device automation triggers for this device.""" - return get_device_automation_triggers(self._zigpy_device) - - @property - def available_signal(self) -> str: - """Signal to use to subscribe to device availability changes.""" - return self._available_signal - - @property - def available(self): - """Return True if device is available.""" - return self._available - - @available.setter - def available(self, new_availability: bool) -> None: - """Set device availability.""" - self._available = new_availability - - @property - def power_configuration_ch(self) -> ClusterHandler | None: - """Return power configuration cluster handler.""" - return self._power_config_ch - - @power_configuration_ch.setter - def power_configuration_ch(self, cluster_handler: ClusterHandler) -> None: - """Power configuration cluster handler setter.""" - if self._power_config_ch is None: - self._power_config_ch = cluster_handler - - @property - def basic_ch(self) -> ClusterHandler | None: - """Return basic cluster handler.""" - return self._basic_ch - - @basic_ch.setter - def basic_ch(self, cluster_handler: ClusterHandler) -> 
None: - """Set the basic cluster handler.""" - if self._basic_ch is None: - self._basic_ch = cluster_handler - - @property - def identify_ch(self) -> ClusterHandler | None: - """Return power configuration cluster handler.""" - return self._identify_ch - - @identify_ch.setter - def identify_ch(self, cluster_handler: ClusterHandler) -> None: - """Power configuration cluster handler setter.""" - if self._identify_ch is None: - self._identify_ch = cluster_handler - - @property - def zdo_cluster_handler(self) -> ZDOClusterHandler: - """Return ZDO cluster handler.""" - return self._zdo_handler - - @property - def endpoints(self) -> dict[int, Endpoint]: - """Return the endpoints for this device.""" - return self._endpoints - - @property - def zigbee_signature(self) -> dict[str, Any]: - """Get zigbee signature for this device.""" - return { - ATTR_NODE_DESCRIPTOR: str(self._zigpy_device.node_desc), - ATTR_ENDPOINTS: { - signature[0]: signature[1] - for signature in [ - endpoint.zigbee_signature for endpoint in self._endpoints.values() - ] - }, - ATTR_MANUFACTURER: self.manufacturer, - ATTR_MODEL: self.model, - } - - @property - def sw_version(self) -> str | None: - """Return the software version for this device.""" - device_registry = dr.async_get(self.hass) - reg_device: DeviceEntry | None = device_registry.async_get(self.device_id) - if reg_device is None: - return None - return reg_device.sw_version - - @classmethod - def new( - cls, - hass: HomeAssistant, - zigpy_dev: zigpy.device.Device, - gateway: ZHAGateway, - ) -> Self: - """Create new device.""" - zha_dev = cls(hass, zigpy_dev, gateway) - zha_dev.unsubs.append( - async_dispatcher_connect( - hass, - SIGNAL_UPDATE_DEVICE.format(str(zha_dev.ieee)), - zha_dev.async_update_sw_build_id, - ) - ) - discovery.PROBE.discover_device_entities(zha_dev) - return zha_dev - - @callback - def async_update_sw_build_id(self, sw_version: int) -> None: - """Update device sw version.""" - if self.device_id is None: - return - - device_registry = dr.async_get(self.hass) - device_registry.async_update_device( - self.device_id, sw_version=f"0x{sw_version:08x}" - ) - - async def _check_available(self, *_: Any) -> None: - # don't flip the availability state of the coordinator - if self.is_coordinator: - return - if self.last_seen is None: - self.debug("last_seen is None, marking the device unavailable") - self.update_available(False) - return - - difference = time.time() - self.last_seen - if difference < self.consider_unavailable_time: - self.debug( - "Device seen - marking the device available and resetting counter" - ) - self.update_available(True) - self._checkins_missed_count = 0 - return - - if self.hass.data[const.DATA_ZHA].allow_polling: - if ( - self._checkins_missed_count >= _CHECKIN_GRACE_PERIODS - or self.manufacturer == "LUMI" - or not self._endpoints - ): - self.debug( - ( - "last_seen is %s seconds ago and ping attempts have been exhausted," - " marking the device unavailable" - ), - difference, - ) - self.update_available(False) - return - - self._checkins_missed_count += 1 - self.debug( - "Attempting to checkin with device - missed checkins: %s", - self._checkins_missed_count, - ) - if not self.basic_ch: - self.debug("does not have a mandatory basic cluster") - self.update_available(False) - return - res = await self.basic_ch.get_attribute_value( - ATTR_MANUFACTURER, from_cache=False - ) - if res is not None: - self._checkins_missed_count = 0 - - def update_available(self, available: bool) -> None: - """Update device availability and signal 
entities.""" - self.debug( - ( - "Update device availability - device available: %s - new availability:" - " %s - changed: %s" - ), - self.available, - available, - self.available ^ available, - ) - availability_changed = self.available ^ available - self.available = available - if availability_changed and available: - # reinit cluster handlers then signal entities - self.debug( - "Device availability changed and device became available," - " reinitializing cluster handlers" - ) - self.hass.async_create_task(self._async_became_available()) - return - if availability_changed and not available: - self.debug("Device availability changed and device became unavailable") - self.zha_send_event( - { - "device_event_type": "device_offline", - }, - ) - async_dispatcher_send(self.hass, f"{self._available_signal}_entity") - - @callback - def zha_send_event(self, event_data: dict[str, str | int]) -> None: - """Relay events to hass.""" - self.hass.bus.async_fire( - const.ZHA_EVENT, - { - const.ATTR_DEVICE_IEEE: str(self.ieee), - const.ATTR_UNIQUE_ID: str(self.ieee), - ATTR_DEVICE_ID: self.device_id, - **event_data, - }, - ) - - async def _async_became_available(self) -> None: - """Update device availability and signal entities.""" - await self.async_initialize(False) - async_dispatcher_send(self.hass, f"{self._available_signal}_entity") - - @property - def device_info(self) -> dict[str, Any]: - """Return a device description for device.""" - ieee = str(self.ieee) - time_struct = time.localtime(self.last_seen) - update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) - return { - ATTR_IEEE: ieee, - ATTR_NWK: self.nwk, - ATTR_MANUFACTURER: self.manufacturer, - ATTR_MODEL: self.model, - ATTR_NAME: self.name or ieee, - ATTR_QUIRK_APPLIED: self.quirk_applied, - ATTR_QUIRK_CLASS: self.quirk_class, - ATTR_QUIRK_ID: self.quirk_id, - ATTR_MANUFACTURER_CODE: self.manufacturer_code, - ATTR_POWER_SOURCE: self.power_source, - ATTR_LQI: self.lqi, - ATTR_RSSI: self.rssi, - ATTR_LAST_SEEN: update_time, - ATTR_AVAILABLE: self.available, - ATTR_DEVICE_TYPE: self.device_type, - ATTR_SIGNATURE: self.zigbee_signature, - } - - async def async_configure(self) -> None: - """Configure the device.""" - should_identify = async_get_zha_config_value( - self._zha_gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_IDENTIFY_ON_JOIN, - True, - ) - self.debug("started configuration") - await self._zdo_handler.async_configure() - self._zdo_handler.debug("'async_configure' stage succeeded") - await asyncio.gather( - *(endpoint.async_configure() for endpoint in self._endpoints.values()) - ) - if isinstance(self._zigpy_device, CustomDeviceV2): - self.debug("applying quirks v2 custom device configuration") - await self._zigpy_device.apply_custom_configuration() - async_dispatcher_send( - self.hass, - const.ZHA_CLUSTER_HANDLER_MSG, - { - const.ATTR_TYPE: const.ZHA_CLUSTER_HANDLER_CFG_DONE, - }, - ) - self.debug("completed configuration") - - if ( - should_identify - and self.identify_ch is not None - and not self.skip_configuration - ): - await self.identify_ch.trigger_effect( - effect_id=Identify.EffectIdentifier.Okay, - effect_variant=Identify.EffectVariant.Default, - ) - - async def async_initialize(self, from_cache: bool = False) -> None: - """Initialize cluster handlers.""" - self.debug("started initialization") - await self._zdo_handler.async_initialize(from_cache) - self._zdo_handler.debug("'async_initialize' stage succeeded") - - # We intentionally do not use `gather` here! 
This is so that if, for example, - # three `device.async_initialize()`s are spawned, only three concurrent requests - # will ever be in flight at once. Startup concurrency is managed at the device - # level. - for endpoint in self._endpoints.values(): - try: - await endpoint.async_initialize(from_cache) - except Exception: # noqa: BLE001 - self.debug("Failed to initialize endpoint", exc_info=True) - - self.debug("power source: %s", self.power_source) - self.status = DeviceStatus.INITIALIZED - self.debug("completed initialization") - - @callback - def async_cleanup_handles(self) -> None: - """Unsubscribe the dispatchers and timers.""" - for unsubscribe in self.unsubs: - unsubscribe() - - @property - def zha_device_info(self) -> dict[str, Any]: - """Get ZHA device information.""" - device_info: dict[str, Any] = {} - device_info.update(self.device_info) - device_info[ATTR_ACTIVE_COORDINATOR] = self.is_active_coordinator - device_info["entities"] = [ - { - "entity_id": entity_ref.reference_id, - ATTR_NAME: entity_ref.device_info[ATTR_NAME], - } - for entity_ref in self.gateway.device_registry[self.ieee] - ] - - topology = self.gateway.application_controller.topology - device_info[ATTR_NEIGHBORS] = [ - { - "device_type": neighbor.device_type.name, - "rx_on_when_idle": neighbor.rx_on_when_idle.name, - "relationship": neighbor.relationship.name, - "extended_pan_id": str(neighbor.extended_pan_id), - "ieee": str(neighbor.ieee), - "nwk": str(neighbor.nwk), - "permit_joining": neighbor.permit_joining.name, - "depth": str(neighbor.depth), - "lqi": str(neighbor.lqi), - } - for neighbor in topology.neighbors[self.ieee] - ] - - device_info[ATTR_ROUTES] = [ - { - "dest_nwk": str(route.DstNWK), - "route_status": str(route.RouteStatus.name), - "memory_constrained": bool(route.MemoryConstrained), - "many_to_one": bool(route.ManyToOne), - "route_record_required": bool(route.RouteRecordRequired), - "next_hop": str(route.NextHop), - } - for route in topology.routes[self.ieee] - ] - - # Return endpoint device type Names - names: list[dict[str, str]] = [] - for endpoint in (ep for epid, ep in self.device.endpoints.items() if epid): - profile = PROFILES.get(endpoint.profile_id) - if profile and endpoint.device_type is not None: - # DeviceType provides undefined enums - names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) - else: - names.append( - { - ATTR_NAME: ( - f"unknown {endpoint.device_type} device_type " - f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" - ) - } - ) - device_info[ATTR_ENDPOINT_NAMES] = names - - device_registry = dr.async_get(self.hass) - reg_device = device_registry.async_get(self.device_id) - if reg_device is not None: - device_info["user_given_name"] = reg_device.name_by_user - device_info["device_reg_id"] = reg_device.id - device_info["area_id"] = reg_device.area_id - return device_info - - @callback - def async_get_clusters(self) -> dict[int, dict[str, dict[int, Cluster]]]: - """Get all clusters for this device.""" - return { - ep_id: { - CLUSTER_TYPE_IN: endpoint.in_clusters, - CLUSTER_TYPE_OUT: endpoint.out_clusters, - } - for (ep_id, endpoint) in self._zigpy_device.endpoints.items() - if ep_id != 0 - } - - @callback - def async_get_groupable_endpoints(self): - """Get device endpoints that have a group 'in' cluster.""" - return [ - ep_id - for (ep_id, clusters) in self.async_get_clusters().items() - if Groups.cluster_id in clusters[CLUSTER_TYPE_IN] - ] - - @callback - def async_get_std_clusters(self): - """Get ZHA and ZLL clusters for this device.""" - - 
return { - ep_id: { - CLUSTER_TYPE_IN: endpoint.in_clusters, - CLUSTER_TYPE_OUT: endpoint.out_clusters, - } - for (ep_id, endpoint) in self._zigpy_device.endpoints.items() - if ep_id != 0 and endpoint.profile_id in PROFILES - } - - @callback - def async_get_cluster( - self, endpoint_id: int, cluster_id: int, cluster_type: str = CLUSTER_TYPE_IN - ) -> Cluster: - """Get zigbee cluster from this entity.""" - clusters: dict[int, dict[str, dict[int, Cluster]]] = self.async_get_clusters() - return clusters[endpoint_id][cluster_type][cluster_id] - - @callback - def async_get_cluster_attributes( - self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN - ): - """Get zigbee attributes for specified cluster.""" - cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) - if cluster is None: - return None - return cluster.attributes - - @callback - def async_get_cluster_commands( - self, endpoint_id, cluster_id, cluster_type=CLUSTER_TYPE_IN - ): - """Get zigbee commands for specified cluster.""" - cluster = self.async_get_cluster(endpoint_id, cluster_id, cluster_type) - if cluster is None: - return None - return { - CLUSTER_COMMANDS_CLIENT: cluster.client_commands, - CLUSTER_COMMANDS_SERVER: cluster.server_commands, - } - - async def write_zigbee_attribute( - self, - endpoint_id, - cluster_id, - attribute, - value, - cluster_type=CLUSTER_TYPE_IN, - manufacturer=None, - ): - """Write a value to a zigbee attribute for a cluster in this entity.""" - try: - cluster: Cluster = self.async_get_cluster( - endpoint_id, cluster_id, cluster_type - ) - except KeyError as exc: - raise ValueError( - f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" - f" writing attribute {attribute} with value {value}" - ) from exc - - try: - response = await cluster.write_attributes( - {attribute: value}, manufacturer=manufacturer - ) - except zigpy.exceptions.ZigbeeException as exc: - raise HomeAssistantError( - f"Failed to set attribute: " - f"{ATTR_VALUE}: {value} " - f"{ATTR_ATTRIBUTE}: {attribute} " - f"{ATTR_CLUSTER_ID}: {cluster_id} " - f"{ATTR_ENDPOINT_ID}: {endpoint_id}" - ) from exc - - self.debug( - "set: %s for attr: %s to cluster: %s for ept: %s - res: %s", - value, - attribute, - cluster_id, - endpoint_id, - response, - ) - return response - - async def issue_cluster_command( - self, - endpoint_id: int, - cluster_id: int, - command: int, - command_type: str, - args: list | None, - params: dict[str, Any] | None, - cluster_type: str = CLUSTER_TYPE_IN, - manufacturer: int | None = None, - ) -> None: - """Issue a command against specified zigbee cluster on this device.""" - try: - cluster: Cluster = self.async_get_cluster( - endpoint_id, cluster_id, cluster_type - ) - except KeyError as exc: - raise ValueError( - f"Cluster {cluster_id} not found on endpoint {endpoint_id} while" - f" issuing command {command} with args {args}" - ) from exc - commands: dict[int, ZCLCommandDef] = ( - cluster.server_commands - if command_type == CLUSTER_COMMAND_SERVER - else cluster.client_commands - ) - if args is not None: - self.warning( - ( - "args [%s] are deprecated and should be passed with the params key." 
- " The parameter names are: %s" - ), - args, - [field.name for field in commands[command].schema.fields], - ) - response = await getattr(cluster, commands[command].name)(*args) - else: - assert params is not None - response = await getattr(cluster, commands[command].name)( - **convert_to_zcl_values(params, commands[command].schema) - ) - self.debug( - "Issued cluster command: %s %s %s %s %s %s %s %s", - f"{ATTR_CLUSTER_ID}: [{cluster_id}]", - f"{ATTR_CLUSTER_TYPE}: [{cluster_type}]", - f"{ATTR_ENDPOINT_ID}: [{endpoint_id}]", - f"{ATTR_COMMAND}: [{command}]", - f"{ATTR_COMMAND_TYPE}: [{command_type}]", - f"{ATTR_ARGS}: [{args}]", - f"{ATTR_PARAMS}: [{params}]", - f"{ATTR_MANUFACTURER}: [{manufacturer}]", - ) - if response is None: - return # client commands don't return a response - if isinstance(response, Exception): - raise HomeAssistantError("Failed to issue cluster command") from response - if response[1] is not ZclStatus.SUCCESS: - raise HomeAssistantError( - f"Failed to issue cluster command with status: {response[1]}" - ) - - async def async_add_to_group(self, group_id: int) -> None: - """Add this device to the provided zigbee group.""" - try: - # A group name is required. However, the spec also explicitly states that - # the group name can be ignored by the receiving device if a device cannot - # store it, so we cannot rely on it existing after being written. This is - # only done to make the ZCL command valid. - await self._zigpy_device.add_to_group(group_id, name=f"0x{group_id:04X}") - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to add device '%s' to group: 0x%04x ex: %s", - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_remove_from_group(self, group_id: int) -> None: - """Remove this device from the provided zigbee group.""" - try: - await self._zigpy_device.remove_from_group(group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to remove device '%s' from group: 0x%04x ex: %s", - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_add_endpoint_to_group( - self, endpoint_id: int, group_id: int - ) -> None: - """Add the device endpoint to the provided zigbee group.""" - try: - await self._zigpy_device.endpoints[endpoint_id].add_to_group( - group_id, name=f"0x{group_id:04X}" - ) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - "Failed to add endpoint: %s for device: '%s' to group: 0x%04x ex: %s", - endpoint_id, - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_remove_endpoint_from_group( - self, endpoint_id: int, group_id: int - ) -> None: - """Remove the device endpoint from the provided zigbee group.""" - try: - await self._zigpy_device.endpoints[endpoint_id].remove_from_group(group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - ( - "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" - " ex: %s" - ), - endpoint_id, - self._zigpy_device.ieee, - group_id, - str(ex), - ) - - async def async_bind_to_group( - self, group_id: int, cluster_bindings: list[ClusterBinding] - ) -> None: - """Directly bind this device to a group for the given clusters.""" - await self._async_group_binding_operation( - group_id, zdo_types.ZDOCmd.Bind_req, cluster_bindings - ) - - async def async_unbind_from_group( - self, group_id: int, cluster_bindings: list[ClusterBinding] - ) -> None: - """Unbind this device from a group for the given clusters.""" - await 
self._async_group_binding_operation( - group_id, zdo_types.ZDOCmd.Unbind_req, cluster_bindings - ) - - async def _async_group_binding_operation( - self, - group_id: int, - operation: zdo_types.ZDOCmd, - cluster_bindings: list[ClusterBinding], - ) -> None: - """Create or remove a direct zigbee binding between a device and a group.""" - - zdo = self._zigpy_device.zdo - op_msg = "0x%04x: %s %s, ep: %s, cluster: %s to group: 0x%04x" - destination_address = zdo_types.MultiAddress() - destination_address.addrmode = types.uint8_t(1) - destination_address.nwk = types.uint16_t(group_id) - - tasks = [] - - for cluster_binding in cluster_bindings: - if cluster_binding.endpoint_id == 0: - continue - if ( - cluster_binding.id - in self._zigpy_device.endpoints[ - cluster_binding.endpoint_id - ].out_clusters - ): - op_params = ( - self.nwk, - operation.name, - str(self.ieee), - cluster_binding.endpoint_id, - cluster_binding.id, - group_id, - ) - zdo.debug(f"processing {op_msg}", *op_params) - tasks.append( - ( - zdo.request( - operation, - self.ieee, - cluster_binding.endpoint_id, - cluster_binding.id, - destination_address, - ), - op_msg, - op_params, - ) - ) - res = await asyncio.gather(*(t[0] for t in tasks), return_exceptions=True) - for outcome, log_msg in zip(res, tasks, strict=False): - if isinstance(outcome, Exception): - fmt = f"{log_msg[1]} failed: %s" - else: - fmt = f"{log_msg[1]} completed: %s" - zdo.debug(fmt, *(log_msg[2] + (outcome,))) - - def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (self.nwk, self.model, *args) - _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/discovery.py b/homeassistant/components/zha/core/discovery.py deleted file mode 100644 index 3c342d14060..00000000000 --- a/homeassistant/components/zha/core/discovery.py +++ /dev/null @@ -1,661 +0,0 @@ -"""Device discovery functions for Zigbee Home Automation.""" - -from __future__ import annotations - -from collections import Counter -from collections.abc import Callable -import logging -from typing import TYPE_CHECKING, Any, cast - -from slugify import slugify -from zigpy.quirks.v2 import ( - BinarySensorMetadata, - CustomDeviceV2, - EntityType, - NumberMetadata, - SwitchMetadata, - WriteAttributeButtonMetadata, - ZCLCommandButtonMetadata, - ZCLEnumMetadata, - ZCLSensorMetadata, -) -from zigpy.state import State -from zigpy.zcl import ClusterType -from zigpy.zcl.clusters.general import Ota - -from homeassistant.const import CONF_TYPE, Platform -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.entity_registry import async_entries_for_device -from homeassistant.helpers.typing import ConfigType - -from .. import ( # noqa: F401 - alarm_control_panel, - binary_sensor, - button, - climate, - cover, - device_tracker, - fan, - light, - lock, - number, - select, - sensor, - siren, - switch, - update, -) -from . 
import const as zha_const, registries as zha_regs - -# importing cluster handlers updates registries -from .cluster_handlers import ( # noqa: F401 - ClusterHandler, - closures, - general, - homeautomation, - hvac, - lighting, - lightlink, - manufacturerspecific, - measurement, - protocol, - security, - smartenergy, -) -from .helpers import get_zha_data, get_zha_gateway - -if TYPE_CHECKING: - from ..entity import ZhaEntity - from .device import ZHADevice - from .endpoint import Endpoint - from .group import ZHAGroup - -_LOGGER = logging.getLogger(__name__) - - -QUIRKS_ENTITY_META_TO_ENTITY_CLASS = { - ( - Platform.BUTTON, - WriteAttributeButtonMetadata, - EntityType.CONFIG, - ): button.ZHAAttributeButton, - ( - Platform.BUTTON, - WriteAttributeButtonMetadata, - EntityType.STANDARD, - ): button.ZHAAttributeButton, - (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.CONFIG): button.ZHAButton, - ( - Platform.BUTTON, - ZCLCommandButtonMetadata, - EntityType.DIAGNOSTIC, - ): button.ZHAButton, - (Platform.BUTTON, ZCLCommandButtonMetadata, EntityType.STANDARD): button.ZHAButton, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.CONFIG, - ): binary_sensor.BinarySensor, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.DIAGNOSTIC, - ): binary_sensor.BinarySensor, - ( - Platform.BINARY_SENSOR, - BinarySensorMetadata, - EntityType.STANDARD, - ): binary_sensor.BinarySensor, - (Platform.SENSOR, ZCLEnumMetadata, EntityType.DIAGNOSTIC): sensor.EnumSensor, - (Platform.SENSOR, ZCLEnumMetadata, EntityType.STANDARD): sensor.EnumSensor, - (Platform.SENSOR, ZCLSensorMetadata, EntityType.DIAGNOSTIC): sensor.Sensor, - (Platform.SENSOR, ZCLSensorMetadata, EntityType.STANDARD): sensor.Sensor, - (Platform.SELECT, ZCLEnumMetadata, EntityType.CONFIG): select.ZCLEnumSelectEntity, - (Platform.SELECT, ZCLEnumMetadata, EntityType.STANDARD): select.ZCLEnumSelectEntity, - ( - Platform.SELECT, - ZCLEnumMetadata, - EntityType.DIAGNOSTIC, - ): select.ZCLEnumSelectEntity, - ( - Platform.NUMBER, - NumberMetadata, - EntityType.CONFIG, - ): number.ZHANumberConfigurationEntity, - (Platform.NUMBER, NumberMetadata, EntityType.DIAGNOSTIC): number.ZhaNumber, - (Platform.NUMBER, NumberMetadata, EntityType.STANDARD): number.ZhaNumber, - ( - Platform.SWITCH, - SwitchMetadata, - EntityType.CONFIG, - ): switch.ZHASwitchConfigurationEntity, - (Platform.SWITCH, SwitchMetadata, EntityType.STANDARD): switch.Switch, -} - - -@callback -async def async_add_entities( - _async_add_entities: AddEntitiesCallback, - entities: list[ - tuple[ - type[ZhaEntity], - tuple[str, ZHADevice, list[ClusterHandler]], - dict[str, Any], - ] - ], - **kwargs, -) -> None: - """Add entities helper.""" - if not entities: - return - - to_add = [ - ent_cls.create_entity(*args, **{**kwargs, **kw_args}) - for ent_cls, args, kw_args in entities - ] - entities_to_add = [entity for entity in to_add if entity is not None] - _async_add_entities(entities_to_add, update_before_add=False) - entities.clear() - - -class ProbeEndpoint: - """All discovered cluster handlers and entities of an endpoint.""" - - def __init__(self) -> None: - """Initialize instance.""" - self._device_configs: ConfigType = {} - - @callback - def discover_entities(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - _LOGGER.debug( - "Discovering entities for endpoint: %s-%s", - str(endpoint.device.ieee), - endpoint.id, - ) - self.discover_by_device_type(endpoint) - self.discover_multi_entities(endpoint) - 
self.discover_by_cluster_id(endpoint) - self.discover_multi_entities(endpoint, config_diagnostic_entities=True) - zha_regs.ZHA_ENTITIES.clean_up() - - @callback - def discover_device_entities(self, device: ZHADevice) -> None: - """Discover entities for a ZHA device.""" - _LOGGER.debug( - "Discovering entities for device: %s-%s", - str(device.ieee), - device.name, - ) - - if device.is_coordinator: - self.discover_coordinator_device_entities(device) - return - - self.discover_quirks_v2_entities(device) - zha_regs.ZHA_ENTITIES.clean_up() - - @callback - def discover_quirks_v2_entities(self, device: ZHADevice) -> None: - """Discover entities for a ZHA device exposed by quirks v2.""" - _LOGGER.debug( - "Attempting to discover quirks v2 entities for device: %s-%s", - str(device.ieee), - device.name, - ) - - if not isinstance(device.device, CustomDeviceV2): - _LOGGER.debug( - "Device: %s-%s is not a quirks v2 device - skipping " - "discover_quirks_v2_entities", - str(device.ieee), - device.name, - ) - return - - zigpy_device: CustomDeviceV2 = device.device - - if not zigpy_device.exposes_metadata: - _LOGGER.debug( - "Device: %s-%s does not expose any quirks v2 entities", - str(device.ieee), - device.name, - ) - return - - for ( - cluster_details, - entity_metadata_list, - ) in zigpy_device.exposes_metadata.items(): - endpoint_id, cluster_id, cluster_type = cluster_details - - if endpoint_id not in device.endpoints: - _LOGGER.warning( - "Device: %s-%s does not have an endpoint with id: %s - unable to " - "create entity with cluster details: %s", - str(device.ieee), - device.name, - endpoint_id, - cluster_details, - ) - continue - - endpoint: Endpoint = device.endpoints[endpoint_id] - cluster = ( - endpoint.zigpy_endpoint.in_clusters.get(cluster_id) - if cluster_type is ClusterType.Server - else endpoint.zigpy_endpoint.out_clusters.get(cluster_id) - ) - - if cluster is None: - _LOGGER.warning( - "Device: %s-%s does not have a cluster with id: %s - " - "unable to create entity with cluster details: %s", - str(device.ieee), - device.name, - cluster_id, - cluster_details, - ) - continue - - cluster_handler_id = f"{endpoint.id}:0x{cluster.cluster_id:04x}" - cluster_handler = ( - endpoint.all_cluster_handlers.get(cluster_handler_id) - if cluster_type is ClusterType.Server - else endpoint.client_cluster_handlers.get(cluster_handler_id) - ) - assert cluster_handler - - for entity_metadata in entity_metadata_list: - platform = Platform(entity_metadata.entity_platform.value) - metadata_type = type(entity_metadata) - entity_class = QUIRKS_ENTITY_META_TO_ENTITY_CLASS.get( - (platform, metadata_type, entity_metadata.entity_type) - ) - - if entity_class is None: - _LOGGER.warning( - "Device: %s-%s has an entity with details: %s that does not" - " have an entity class mapping - unable to create entity", - str(device.ieee), - device.name, - { - zha_const.CLUSTER_DETAILS: cluster_details, - zha_const.ENTITY_METADATA: entity_metadata, - }, - ) - continue - - # automatically add the attribute to ZCL_INIT_ATTRS for the cluster - # handler if it is not already in the list - if ( - hasattr(entity_metadata, "attribute_name") - and entity_metadata.attribute_name - not in cluster_handler.ZCL_INIT_ATTRS - ): - init_attrs = cluster_handler.ZCL_INIT_ATTRS.copy() - init_attrs[entity_metadata.attribute_name] = ( - entity_metadata.attribute_initialized_from_cache - ) - cluster_handler.__dict__[zha_const.ZCL_INIT_ATTRS] = init_attrs - - endpoint.async_new_entity( - platform, - entity_class, - endpoint.unique_id, - 
[cluster_handler], - entity_metadata=entity_metadata, - ) - - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - platform, - entity_class.__name__, - [cluster_handler.name], - ) - - @callback - def discover_coordinator_device_entities(self, device: ZHADevice) -> None: - """Discover entities for the coordinator device.""" - _LOGGER.debug( - "Discovering entities for coordinator device: %s-%s", - str(device.ieee), - device.name, - ) - state: State = device.gateway.application_controller.state - platforms: dict[Platform, list] = get_zha_data(device.hass).platforms - - @callback - def process_counters(counter_groups: str) -> None: - for counter_group, counters in getattr(state, counter_groups).items(): - for counter in counters: - platforms[Platform.SENSOR].append( - ( - sensor.DeviceCounterSensor, - ( - f"{slugify(str(device.ieee))}_{counter_groups}_{counter_group}_{counter}", - device, - counter_groups, - counter_group, - counter, - ), - {}, - ) - ) - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - Platform.SENSOR, - sensor.DeviceCounterSensor.__name__, - f"counter groups[{counter_groups}] counter group[{counter_group}] counter[{counter}]", - ) - - process_counters("counters") - process_counters("broadcast_counters") - process_counters("device_counters") - process_counters("group_counters") - - @callback - def discover_by_device_type(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - - unique_id = endpoint.unique_id - - platform: str | None = self._device_configs.get(unique_id, {}).get(CONF_TYPE) - if platform is None: - ep_profile_id = endpoint.zigpy_endpoint.profile_id - ep_device_type = endpoint.zigpy_endpoint.device_type - platform = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) - - if platform and platform in zha_const.PLATFORMS: - platform = cast(Platform, platform) - - cluster_handlers = endpoint.unclaimed_cluster_handlers() - platform_entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( - platform, - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handlers, - endpoint.device.quirk_id, - ) - if platform_entity_class is None: - return - endpoint.claim_cluster_handlers(claimed) - endpoint.async_new_entity( - platform, platform_entity_class, unique_id, claimed - ) - - @callback - def discover_by_cluster_id(self, endpoint: Endpoint) -> None: - """Process an endpoint on a zigpy device.""" - - items = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.items() - single_input_clusters = { - cluster_class: match - for cluster_class, match in items - if not isinstance(cluster_class, int) - } - remaining_cluster_handlers = endpoint.unclaimed_cluster_handlers() - for cluster_handler in remaining_cluster_handlers: - if ( - cluster_handler.cluster.cluster_id - in zha_regs.CLUSTER_HANDLER_ONLY_CLUSTERS - ): - endpoint.claim_cluster_handlers([cluster_handler]) - continue - - platform = zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS.get( - cluster_handler.cluster.cluster_id - ) - if platform is None: - for cluster_class, match in single_input_clusters.items(): - if isinstance(cluster_handler.cluster, cluster_class): - platform = match - break - - self.probe_single_cluster(platform, cluster_handler, endpoint) - - # until we can get rid of registries - self.handle_on_off_output_cluster_exception(endpoint) - - @staticmethod - def probe_single_cluster( - platform: Platform | None, - cluster_handler: ClusterHandler, - endpoint: Endpoint, - ) -> None: - """Probe specified cluster for specific component.""" - if platform is None or platform not in 
zha_const.PLATFORMS: - return - cluster_handler_list = [cluster_handler] - unique_id = f"{endpoint.unique_id}-{cluster_handler.cluster.cluster_id}" - - entity_class, claimed = zha_regs.ZHA_ENTITIES.get_entity( - platform, - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handler_list, - endpoint.device.quirk_id, - ) - if entity_class is None: - return - endpoint.claim_cluster_handlers(claimed) - endpoint.async_new_entity(platform, entity_class, unique_id, claimed) - - def handle_on_off_output_cluster_exception(self, endpoint: Endpoint) -> None: - """Process output clusters of the endpoint.""" - - profile_id = endpoint.zigpy_endpoint.profile_id - device_type = endpoint.zigpy_endpoint.device_type - if device_type in zha_regs.REMOTE_DEVICE_TYPES.get(profile_id, []): - return - - for cluster_id, cluster in endpoint.zigpy_endpoint.out_clusters.items(): - platform = zha_regs.SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS.get( - cluster.cluster_id - ) - if platform is None: - continue - - cluster_handler_classes = zha_regs.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: ClusterHandler} - ) - - quirk_id = ( - endpoint.device.quirk_id - if endpoint.device.quirk_id in cluster_handler_classes - else None - ) - - cluster_handler_class = cluster_handler_classes.get( - quirk_id, ClusterHandler - ) - - cluster_handler = cluster_handler_class(cluster, endpoint) - self.probe_single_cluster(platform, cluster_handler, endpoint) - - @staticmethod - @callback - def discover_multi_entities( - endpoint: Endpoint, - config_diagnostic_entities: bool = False, - ) -> None: - """Process an endpoint on and discover multiple entities.""" - - ep_profile_id = endpoint.zigpy_endpoint.profile_id - ep_device_type = endpoint.zigpy_endpoint.device_type - cmpt_by_dev_type = zha_regs.DEVICE_CLASS[ep_profile_id].get(ep_device_type) - - if config_diagnostic_entities: - cluster_handlers = list(endpoint.all_cluster_handlers.values()) - ota_handler_id = f"{endpoint.id}:0x{Ota.cluster_id:04x}" - if ota_handler_id in endpoint.client_cluster_handlers: - cluster_handlers.append( - endpoint.client_cluster_handlers[ota_handler_id] - ) - matches, claimed = zha_regs.ZHA_ENTITIES.get_config_diagnostic_entity( - endpoint.device.manufacturer, - endpoint.device.model, - cluster_handlers, - endpoint.device.quirk_id, - ) - else: - matches, claimed = zha_regs.ZHA_ENTITIES.get_multi_entity( - endpoint.device.manufacturer, - endpoint.device.model, - endpoint.unclaimed_cluster_handlers(), - endpoint.device.quirk_id, - ) - - endpoint.claim_cluster_handlers(claimed) - for platform, ent_n_handler_list in matches.items(): - for entity_and_handler in ent_n_handler_list: - _LOGGER.debug( - "'%s' platform -> '%s' using %s", - platform, - entity_and_handler.entity_class.__name__, - [ch.name for ch in entity_and_handler.claimed_cluster_handlers], - ) - for platform, ent_n_handler_list in matches.items(): - for entity_and_handler in ent_n_handler_list: - if platform == cmpt_by_dev_type: - # for well known device types, - # like thermostats we'll take only 1st class - endpoint.async_new_entity( - platform, - entity_and_handler.entity_class, - endpoint.unique_id, - entity_and_handler.claimed_cluster_handlers, - ) - break - first_ch = entity_and_handler.claimed_cluster_handlers[0] - endpoint.async_new_entity( - platform, - entity_and_handler.entity_class, - f"{endpoint.unique_id}-{first_ch.cluster.cluster_id}", - entity_and_handler.claimed_cluster_handlers, - ) - - def initialize(self, hass: HomeAssistant) -> None: - """Update device overrides 
config.""" - zha_config = get_zha_data(hass).yaml_config - if overrides := zha_config.get(zha_const.CONF_DEVICE_CONFIG): - self._device_configs.update(overrides) - - -class GroupProbe: - """Determine the appropriate component for a group.""" - - _hass: HomeAssistant - - def __init__(self) -> None: - """Initialize instance.""" - self._unsubs: list[Callable[[], None]] = [] - - def initialize(self, hass: HomeAssistant) -> None: - """Initialize the group probe.""" - self._hass = hass - self._unsubs.append( - async_dispatcher_connect( - hass, zha_const.SIGNAL_GROUP_ENTITY_REMOVED, self._reprobe_group - ) - ) - - def cleanup(self) -> None: - """Clean up on when ZHA shuts down.""" - for unsub in self._unsubs[:]: - unsub() - self._unsubs.remove(unsub) - - @callback - def _reprobe_group(self, group_id: int) -> None: - """Reprobe a group for entities after its members change.""" - zha_gateway = get_zha_gateway(self._hass) - if (zha_group := zha_gateway.groups.get(group_id)) is None: - return - self.discover_group_entities(zha_group) - - @callback - def discover_group_entities(self, group: ZHAGroup) -> None: - """Process a group and create any entities that are needed.""" - # only create a group entity if there are 2 or more members in a group - if len(group.members) < 2: - _LOGGER.debug( - "Group: %s:0x%04x has less than 2 members - skipping entity discovery", - group.name, - group.group_id, - ) - return - - entity_domains = GroupProbe.determine_entity_domains(self._hass, group) - - if not entity_domains: - return - - zha_data = get_zha_data(self._hass) - zha_gateway = get_zha_gateway(self._hass) - - for domain in entity_domains: - entity_class = zha_regs.ZHA_ENTITIES.get_group_entity(domain) - if entity_class is None: - continue - zha_data.platforms[domain].append( - ( - entity_class, - ( - group.get_domain_entity_ids(domain), - f"{domain}_zha_group_0x{group.group_id:04x}", - group.group_id, - zha_gateway.coordinator_zha_device, - ), - {}, - ) - ) - async_dispatcher_send(self._hass, zha_const.SIGNAL_ADD_ENTITIES) - - @staticmethod - def determine_entity_domains( - hass: HomeAssistant, group: ZHAGroup - ) -> list[Platform]: - """Determine the entity domains for this group.""" - entity_registry = er.async_get(hass) - - entity_domains: list[Platform] = [] - all_domain_occurrences: list[Platform] = [] - - for member in group.members: - if member.device.is_coordinator: - continue - entities = async_entries_for_device( - entity_registry, - member.device.device_id, - include_disabled_entities=True, - ) - all_domain_occurrences.extend( - [ - cast(Platform, entity.domain) - for entity in entities - if entity.domain in zha_regs.GROUP_ENTITY_DOMAINS - ] - ) - if not all_domain_occurrences: - return entity_domains - # get all domains we care about if there are more than 2 entities of this domain - counts = Counter(all_domain_occurrences) - entity_domains = [domain[0] for domain in counts.items() if domain[1] >= 2] - _LOGGER.debug( - "The entity domains are: %s for group: %s:0x%04x", - entity_domains, - group.name, - group.group_id, - ) - return entity_domains - - -PROBE = ProbeEndpoint() -GROUP_PROBE = GroupProbe() diff --git a/homeassistant/components/zha/core/endpoint.py b/homeassistant/components/zha/core/endpoint.py deleted file mode 100644 index 32483a3bc53..00000000000 --- a/homeassistant/components/zha/core/endpoint.py +++ /dev/null @@ -1,253 +0,0 @@ -"""Representation of a Zigbee endpoint for zha.""" - -from __future__ import annotations - -import asyncio -from collections.abc import Awaitable, 
Callable -import functools -import logging -from typing import TYPE_CHECKING, Any, Final - -from homeassistant.const import Platform -from homeassistant.core import callback -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.util.async_ import gather_with_limited_concurrency - -from . import const, discovery, registries -from .cluster_handlers import ClusterHandler -from .helpers import get_zha_data - -if TYPE_CHECKING: - from zigpy import Endpoint as ZigpyEndpoint - - from .cluster_handlers import ClientClusterHandler - from .device import ZHADevice - -ATTR_DEVICE_TYPE: Final[str] = "device_type" -ATTR_PROFILE_ID: Final[str] = "profile_id" -ATTR_IN_CLUSTERS: Final[str] = "input_clusters" -ATTR_OUT_CLUSTERS: Final[str] = "output_clusters" - -_LOGGER = logging.getLogger(__name__) - - -class Endpoint: - """Endpoint for a zha device.""" - - def __init__(self, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> None: - """Initialize instance.""" - assert zigpy_endpoint is not None - assert device is not None - self._zigpy_endpoint: ZigpyEndpoint = zigpy_endpoint - self._device: ZHADevice = device - self._all_cluster_handlers: dict[str, ClusterHandler] = {} - self._claimed_cluster_handlers: dict[str, ClusterHandler] = {} - self._client_cluster_handlers: dict[str, ClientClusterHandler] = {} - self._unique_id: str = f"{device.ieee!s}-{zigpy_endpoint.endpoint_id}" - - @property - def device(self) -> ZHADevice: - """Return the device this endpoint belongs to.""" - return self._device - - @property - def all_cluster_handlers(self) -> dict[str, ClusterHandler]: - """All server cluster handlers of an endpoint.""" - return self._all_cluster_handlers - - @property - def claimed_cluster_handlers(self) -> dict[str, ClusterHandler]: - """Cluster handlers in use.""" - return self._claimed_cluster_handlers - - @property - def client_cluster_handlers(self) -> dict[str, ClientClusterHandler]: - """Return a dict of client cluster handlers.""" - return self._client_cluster_handlers - - @property - def zigpy_endpoint(self) -> ZigpyEndpoint: - """Return endpoint of zigpy device.""" - return self._zigpy_endpoint - - @property - def id(self) -> int: - """Return endpoint id.""" - return self._zigpy_endpoint.endpoint_id - - @property - def unique_id(self) -> str: - """Return the unique id for this endpoint.""" - return self._unique_id - - @property - def zigbee_signature(self) -> tuple[int, dict[str, Any]]: - """Get the zigbee signature for the endpoint this pool represents.""" - return ( - self.id, - { - ATTR_PROFILE_ID: f"0x{self._zigpy_endpoint.profile_id:04x}" - if self._zigpy_endpoint.profile_id is not None - else "", - ATTR_DEVICE_TYPE: f"0x{self._zigpy_endpoint.device_type:04x}" - if self._zigpy_endpoint.device_type is not None - else "", - ATTR_IN_CLUSTERS: [ - f"0x{cluster_id:04x}" - for cluster_id in sorted(self._zigpy_endpoint.in_clusters) - ], - ATTR_OUT_CLUSTERS: [ - f"0x{cluster_id:04x}" - for cluster_id in sorted(self._zigpy_endpoint.out_clusters) - ], - }, - ) - - @classmethod - def new(cls, zigpy_endpoint: ZigpyEndpoint, device: ZHADevice) -> Endpoint: - """Create new endpoint and populate cluster handlers.""" - endpoint = cls(zigpy_endpoint, device) - endpoint.add_all_cluster_handlers() - endpoint.add_client_cluster_handlers() - if not device.is_coordinator: - discovery.PROBE.discover_entities(endpoint) - return endpoint - - def add_all_cluster_handlers(self) -> None: - """Create and add cluster handlers for all input clusters.""" - for cluster_id, cluster in 
self.zigpy_endpoint.in_clusters.items(): - cluster_handler_classes = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: ClusterHandler} - ) - quirk_id = ( - self.device.quirk_id - if self.device.quirk_id in cluster_handler_classes - else None - ) - cluster_handler_class = cluster_handler_classes.get( - quirk_id, ClusterHandler - ) - - # Allow cluster handler to filter out bad matches - if not cluster_handler_class.matches(cluster, self): - cluster_handler_class = ClusterHandler - - _LOGGER.debug( - "Creating cluster handler for cluster id: %s class: %s", - cluster_id, - cluster_handler_class, - ) - - try: - cluster_handler = cluster_handler_class(cluster, self) - except KeyError as err: - _LOGGER.warning( - "Cluster handler %s for cluster %s on endpoint %s is invalid: %s", - cluster_handler_class, - cluster, - self, - err, - ) - continue - - if cluster_handler.name == const.CLUSTER_HANDLER_POWER_CONFIGURATION: - self._device.power_configuration_ch = cluster_handler - elif cluster_handler.name == const.CLUSTER_HANDLER_IDENTIFY: - self._device.identify_ch = cluster_handler - elif cluster_handler.name == const.CLUSTER_HANDLER_BASIC: - self._device.basic_ch = cluster_handler - self._all_cluster_handlers[cluster_handler.id] = cluster_handler - - def add_client_cluster_handlers(self) -> None: - """Create client cluster handlers for all output clusters if in the registry.""" - for ( - cluster_id, - cluster_handler_class, - ) in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.items(): - cluster = self.zigpy_endpoint.out_clusters.get(cluster_id) - if cluster is not None: - cluster_handler = cluster_handler_class(cluster, self) - self.client_cluster_handlers[cluster_handler.id] = cluster_handler - - async def async_initialize(self, from_cache: bool = False) -> None: - """Initialize claimed cluster handlers.""" - await self._execute_handler_tasks( - "async_initialize", from_cache, max_concurrency=1 - ) - - async def async_configure(self) -> None: - """Configure claimed cluster handlers.""" - await self._execute_handler_tasks("async_configure") - - async def _execute_handler_tasks( - self, func_name: str, *args: Any, max_concurrency: int | None = None - ) -> None: - """Add a throttled cluster handler task and swallow exceptions.""" - cluster_handlers = [ - *self.claimed_cluster_handlers.values(), - *self.client_cluster_handlers.values(), - ] - tasks = [getattr(ch, func_name)(*args) for ch in cluster_handlers] - - gather: Callable[..., Awaitable] - - if max_concurrency is None: - gather = asyncio.gather - else: - gather = functools.partial(gather_with_limited_concurrency, max_concurrency) - - results = await gather(*tasks, return_exceptions=True) - for cluster_handler, outcome in zip(cluster_handlers, results, strict=False): - if isinstance(outcome, Exception): - cluster_handler.debug( - "'%s' stage failed: %s", func_name, str(outcome), exc_info=outcome - ) - else: - cluster_handler.debug("'%s' stage succeeded", func_name) - - def async_new_entity( - self, - platform: Platform, - entity_class: type, - unique_id: str, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Create a new entity.""" - from .device import DeviceStatus # pylint: disable=import-outside-toplevel - - if self.device.status == DeviceStatus.INITIALIZED: - return - - zha_data = get_zha_data(self.device.hass) - zha_data.platforms[platform].append( - (entity_class, (unique_id, self.device, cluster_handlers), kwargs or {}) - ) - - @callback - def async_send_signal(self, signal: str, *args: Any) -> 
None: - """Send a signal through hass dispatcher.""" - async_dispatcher_send(self.device.hass, signal, *args) - - def send_event(self, signal: dict[str, Any]) -> None: - """Broadcast an event from this endpoint.""" - self.device.zha_send_event( - { - const.ATTR_UNIQUE_ID: self.unique_id, - const.ATTR_ENDPOINT_ID: self.id, - **signal, - } - ) - - def claim_cluster_handlers(self, cluster_handlers: list[ClusterHandler]) -> None: - """Claim cluster handlers.""" - self.claimed_cluster_handlers.update({ch.id: ch for ch in cluster_handlers}) - - def unclaimed_cluster_handlers(self) -> list[ClusterHandler]: - """Return a list of available (unclaimed) cluster handlers.""" - claimed = set(self.claimed_cluster_handlers) - available = set(self.all_cluster_handlers) - return [ - self.all_cluster_handlers[cluster_id] - for cluster_id in (available - claimed) - ] diff --git a/homeassistant/components/zha/core/gateway.py b/homeassistant/components/zha/core/gateway.py deleted file mode 100644 index 8b8826e2648..00000000000 --- a/homeassistant/components/zha/core/gateway.py +++ /dev/null @@ -1,882 +0,0 @@ -"""Virtual gateway for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -import collections -from collections.abc import Callable -from contextlib import suppress -from datetime import timedelta -from enum import Enum -import itertools -import logging -import re -import time -from typing import TYPE_CHECKING, Any, NamedTuple, Self, cast - -from zigpy.application import ControllerApplication -from zigpy.config import ( - CONF_DATABASE, - CONF_DEVICE, - CONF_DEVICE_PATH, - CONF_NWK, - CONF_NWK_CHANNEL, - CONF_NWK_VALIDATE_SETTINGS, -) -import zigpy.device -import zigpy.endpoint -import zigpy.group -from zigpy.state import State -from zigpy.types.named import EUI64 - -from homeassistant import __path__ as HOMEASSISTANT_PATH -from homeassistant.components.system_log import LogEntry -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType -from homeassistant.util.async_ import gather_with_limited_concurrency - -from . 
import discovery -from .const import ( - ATTR_IEEE, - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_NWK, - ATTR_SIGNATURE, - ATTR_TYPE, - CONF_RADIO_TYPE, - CONF_USE_THREAD, - CONF_ZIGPY, - DATA_ZHA, - DEBUG_COMP_BELLOWS, - DEBUG_COMP_ZHA, - DEBUG_COMP_ZIGPY, - DEBUG_COMP_ZIGPY_DECONZ, - DEBUG_COMP_ZIGPY_XBEE, - DEBUG_COMP_ZIGPY_ZIGATE, - DEBUG_COMP_ZIGPY_ZNP, - DEBUG_LEVEL_CURRENT, - DEBUG_LEVEL_ORIGINAL, - DEBUG_LEVELS, - DEBUG_RELAY_LOGGERS, - DEFAULT_DATABASE_NAME, - DEVICE_PAIRING_STATUS, - DOMAIN, - SIGNAL_ADD_ENTITIES, - SIGNAL_GROUP_MEMBERSHIP_CHANGE, - SIGNAL_REMOVE, - UNKNOWN_MANUFACTURER, - UNKNOWN_MODEL, - ZHA_GW_MSG, - ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO, - ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_GROUP_ADDED, - ZHA_GW_MSG_GROUP_INFO, - ZHA_GW_MSG_GROUP_MEMBER_ADDED, - ZHA_GW_MSG_GROUP_MEMBER_REMOVED, - ZHA_GW_MSG_GROUP_REMOVED, - ZHA_GW_MSG_LOG_ENTRY, - ZHA_GW_MSG_LOG_OUTPUT, - ZHA_GW_MSG_RAW_INIT, - RadioType, -) -from .device import DeviceStatus, ZHADevice -from .group import GroupMember, ZHAGroup -from .helpers import get_zha_data -from .registries import GROUP_ENTITY_DOMAINS - -if TYPE_CHECKING: - from logging import Filter, LogRecord - - from ..entity import ZhaEntity - from .cluster_handlers import ClusterHandler - - type _LogFilterType = Filter | Callable[[LogRecord], bool] - -_LOGGER = logging.getLogger(__name__) - - -class EntityReference(NamedTuple): - """Describes an entity reference.""" - - reference_id: str - zha_device: ZHADevice - cluster_handlers: dict[str, ClusterHandler] - device_info: DeviceInfo - remove_future: asyncio.Future[Any] - - -class DevicePairingStatus(Enum): - """Status of a device.""" - - PAIRED = 1 - INTERVIEW_COMPLETE = 2 - CONFIGURED = 3 - INITIALIZED = 4 - - -class ZHAGateway: - """Gateway that handles events that happen on the ZHA Zigbee network.""" - - def __init__( - self, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry - ) -> None: - """Initialize the gateway.""" - self.hass = hass - self._config = config - self._devices: dict[EUI64, ZHADevice] = {} - self._groups: dict[int, ZHAGroup] = {} - self.application_controller: ControllerApplication = None - self.coordinator_zha_device: ZHADevice = None # type: ignore[assignment] - self._device_registry: collections.defaultdict[EUI64, list[EntityReference]] = ( - collections.defaultdict(list) - ) - self._log_levels: dict[str, dict[str, int]] = { - DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), - DEBUG_LEVEL_CURRENT: async_capture_log_levels(), - } - self.debug_enabled = False - self._log_relay_handler = LogRelayHandler(hass, self) - self.config_entry = config_entry - self._unsubs: list[Callable[[], None]] = [] - - self.shutting_down = False - self._reload_task: asyncio.Task | None = None - - def get_application_controller_data(self) -> tuple[ControllerApplication, dict]: - """Get an uninitialized instance of a zigpy `ControllerApplication`.""" - radio_type = RadioType[self.config_entry.data[CONF_RADIO_TYPE]] - - app_config = self._config.get(CONF_ZIGPY, {}) - database = self._config.get( - CONF_DATABASE, - self.hass.config.path(DEFAULT_DATABASE_NAME), - ) - app_config[CONF_DATABASE] = database - app_config[CONF_DEVICE] = self.config_entry.data[CONF_DEVICE] - - if CONF_NWK_VALIDATE_SETTINGS not in app_config: - app_config[CONF_NWK_VALIDATE_SETTINGS] = True - - # The bellows UART thread sometimes propagates a cancellation into the main Core - # event loop, when a connection to a TCP coordinator fails in a specific way - if ( - 
CONF_USE_THREAD not in app_config - and radio_type is RadioType.ezsp - and app_config[CONF_DEVICE][CONF_DEVICE_PATH].startswith("socket://") - ): - app_config[CONF_USE_THREAD] = False - - # Local import to avoid circular dependencies - # pylint: disable-next=import-outside-toplevel - from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( - is_multiprotocol_url, - ) - - # Until we have a way to coordinate channels with the Thread half of multi-PAN, - # stick to the old zigpy default of channel 15 instead of dynamically scanning - if ( - is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) - and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None - ): - app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 - - return radio_type.controller, radio_type.controller.SCHEMA(app_config) - - @classmethod - async def async_from_config( - cls, hass: HomeAssistant, config: ConfigType, config_entry: ConfigEntry - ) -> Self: - """Create an instance of a gateway from config objects.""" - instance = cls(hass, config, config_entry) - await instance.async_initialize() - return instance - - async def async_initialize(self) -> None: - """Initialize controller and connect radio.""" - discovery.PROBE.initialize(self.hass) - discovery.GROUP_PROBE.initialize(self.hass) - - self.shutting_down = False - - app_controller_cls, app_config = self.get_application_controller_data() - app = await app_controller_cls.new( - config=app_config, - auto_form=False, - start_radio=False, - ) - - try: - await app.startup(auto_form=True) - except Exception: - # Explicitly shut down the controller application on failure - await app.shutdown() - raise - - self.application_controller = app - - zha_data = get_zha_data(self.hass) - zha_data.gateway = self - - self.coordinator_zha_device = self._async_get_or_create_device( - self._find_coordinator_device() - ) - - self.async_load_devices() - self.async_load_groups() - - self.application_controller.add_listener(self) - self.application_controller.groups.add_listener(self) - - def connection_lost(self, exc: Exception) -> None: - """Handle connection lost event.""" - _LOGGER.debug("Connection to the radio was lost: %r", exc) - - if self.shutting_down: - return - - # Ensure we do not queue up multiple resets - if self._reload_task is not None: - _LOGGER.debug("Ignoring reset, one is already running") - return - - self._reload_task = self.hass.async_create_task( - self.hass.config_entries.async_reload(self.config_entry.entry_id) - ) - - def _find_coordinator_device(self) -> zigpy.device.Device: - zigpy_coordinator = self.application_controller.get_device(nwk=0x0000) - - if last_backup := self.application_controller.backups.most_recent_backup(): - with suppress(KeyError): - zigpy_coordinator = self.application_controller.get_device( - ieee=last_backup.node_info.ieee - ) - - return zigpy_coordinator - - @callback - def async_load_devices(self) -> None: - """Restore ZHA devices from zigpy application state.""" - - for zigpy_device in self.application_controller.devices.values(): - zha_device = self._async_get_or_create_device(zigpy_device) - delta_msg = "not known" - if zha_device.last_seen is not None: - delta = round(time.time() - zha_device.last_seen) - delta_msg = f"{timedelta(seconds=delta)!s} ago" - _LOGGER.debug( - ( - "[%s](%s) restored as '%s', last seen: %s," - " consider_unavailable_time: %s seconds" - ), - zha_device.nwk, - zha_device.name, - "available" if zha_device.available else "unavailable", - delta_msg, - 
zha_device.consider_unavailable_time, - ) - - @callback - def async_load_groups(self) -> None: - """Initialize ZHA groups.""" - - for group_id in self.application_controller.groups: - group = self.application_controller.groups[group_id] - zha_group = self._async_get_or_create_group(group) - # we can do this here because the entities are in the - # entity registry tied to the devices - discovery.GROUP_PROBE.discover_group_entities(zha_group) - - @property - def radio_concurrency(self) -> int: - """Maximum configured radio concurrency.""" - return self.application_controller._concurrent_requests_semaphore.max_value # noqa: SLF001 - - async def async_fetch_updated_state_mains(self) -> None: - """Fetch updated state for mains powered devices.""" - _LOGGER.debug("Fetching current state for mains powered devices") - - now = time.time() - - # Only delay startup to poll mains-powered devices that are online - online_devices = [ - dev - for dev in self.devices.values() - if dev.is_mains_powered - and dev.last_seen is not None - and (now - dev.last_seen) < dev.consider_unavailable_time - ] - - # Prioritize devices that have recently been contacted - online_devices.sort(key=lambda dev: cast(float, dev.last_seen), reverse=True) - - # Make sure that we always leave slots for non-startup requests - max_poll_concurrency = max(1, self.radio_concurrency - 4) - - await gather_with_limited_concurrency( - max_poll_concurrency, - *(dev.async_initialize(from_cache=False) for dev in online_devices), - ) - - _LOGGER.debug("completed fetching current state for mains powered devices") - - async def async_initialize_devices_and_entities(self) -> None: - """Initialize devices and load entities.""" - - _LOGGER.debug("Initializing all devices from Zigpy cache") - await asyncio.gather( - *(dev.async_initialize(from_cache=True) for dev in self.devices.values()) - ) - - async def fetch_updated_state() -> None: - """Fetch updated state for mains powered devices.""" - await self.async_fetch_updated_state_mains() - _LOGGER.debug("Allowing polled requests") - self.hass.data[DATA_ZHA].allow_polling = True - - # background the fetching of state for mains powered devices - self.config_entry.async_create_background_task( - self.hass, fetch_updated_state(), "zha.gateway-fetch_updated_state" - ) - - def device_joined(self, device: zigpy.device.Device) -> None: - """Handle device joined. 
- - At this point, no information about the device is known other than its - address - """ - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: device.nwk, - ATTR_IEEE: str(device.ieee), - DEVICE_PAIRING_STATUS: DevicePairingStatus.PAIRED.name, - }, - }, - ) - - def raw_device_initialized(self, device: zigpy.device.Device) -> None: - """Handle a device initialization without quirks loaded.""" - manuf = device.manufacturer - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, - ZHA_GW_MSG_DEVICE_INFO: { - ATTR_NWK: device.nwk, - ATTR_IEEE: str(device.ieee), - DEVICE_PAIRING_STATUS: DevicePairingStatus.INTERVIEW_COMPLETE.name, - ATTR_MODEL: device.model if device.model else UNKNOWN_MODEL, - ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, - ATTR_SIGNATURE: device.get_signature(), - }, - }, - ) - - def device_initialized(self, device: zigpy.device.Device) -> None: - """Handle device joined and basic information discovered.""" - self.hass.async_create_task(self.async_device_initialized(device)) - - def device_left(self, device: zigpy.device.Device) -> None: - """Handle device leaving the network.""" - self.async_update_device(device, False) - - def group_member_removed( - self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint - ) -> None: - """Handle zigpy group member removed event.""" - # need to handle endpoint correctly on groups - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_member_removed - endpoint: %s", endpoint) - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_REMOVED) - async_dispatcher_send( - self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" - ) - - def group_member_added( - self, zigpy_group: zigpy.group.Group, endpoint: zigpy.endpoint.Endpoint - ) -> None: - """Handle zigpy group member added event.""" - # need to handle endpoint correctly on groups - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_member_added - endpoint: %s", endpoint) - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_MEMBER_ADDED) - async_dispatcher_send( - self.hass, f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{zigpy_group.group_id:04x}" - ) - if len(zha_group.members) == 2: - # we need to do this because there wasn't already - # a group entity to remove and re-add - discovery.GROUP_PROBE.discover_group_entities(zha_group) - - def group_added(self, zigpy_group: zigpy.group.Group) -> None: - """Handle zigpy group added event.""" - zha_group = self._async_get_or_create_group(zigpy_group) - zha_group.info("group_added") - # need to dispatch for entity creation here - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_ADDED) - - def group_removed(self, zigpy_group: zigpy.group.Group) -> None: - """Handle zigpy group removed event.""" - self._send_group_gateway_message(zigpy_group, ZHA_GW_MSG_GROUP_REMOVED) - zha_group = self._groups.pop(zigpy_group.group_id) - zha_group.info("group_removed") - self._cleanup_group_entity_registry_entries(zigpy_group) - - def _send_group_gateway_message( - self, zigpy_group: zigpy.group.Group, gateway_message_type: str - ) -> None: - """Send the gateway event for a zigpy group event.""" - zha_group = self._groups.get(zigpy_group.group_id) - if zha_group is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: gateway_message_type, - ZHA_GW_MSG_GROUP_INFO: zha_group.group_info, 
- }, - ) - - async def _async_remove_device( - self, device: ZHADevice, entity_refs: list[EntityReference] | None - ) -> None: - if entity_refs is not None: - remove_tasks: list[asyncio.Future[Any]] = [ - entity_ref.remove_future for entity_ref in entity_refs - ] - if remove_tasks: - await asyncio.wait(remove_tasks) - - device_registry = dr.async_get(self.hass) - reg_device = device_registry.async_get(device.device_id) - if reg_device is not None: - device_registry.async_remove_device(reg_device.id) - - def device_removed(self, device: zigpy.device.Device) -> None: - """Handle device being removed from the network.""" - zha_device = self._devices.pop(device.ieee, None) - entity_refs = self._device_registry.pop(device.ieee, None) - if zha_device is not None: - device_info = zha_device.zha_device_info - zha_device.async_cleanup_handles() - async_dispatcher_send(self.hass, f"{SIGNAL_REMOVE}_{zha_device.ieee!s}") - self.hass.async_create_task( - self._async_remove_device(zha_device, entity_refs), - "ZHAGateway._async_remove_device", - ) - if device_info is not None: - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - def get_device(self, ieee: EUI64) -> ZHADevice | None: - """Return ZHADevice for given ieee.""" - return self._devices.get(ieee) - - def get_group(self, group_id: int) -> ZHAGroup | None: - """Return Group for given group id.""" - return self.groups.get(group_id) - - @callback - def async_get_group_by_name(self, group_name: str) -> ZHAGroup | None: - """Get ZHA group by name.""" - for group in self.groups.values(): - if group.name == group_name: - return group - return None - - def get_entity_reference(self, entity_id: str) -> EntityReference | None: - """Return entity reference for given entity_id if found.""" - for entity_reference in itertools.chain.from_iterable( - self.device_registry.values() - ): - if entity_id == entity_reference.reference_id: - return entity_reference - return None - - def remove_entity_reference(self, entity: ZhaEntity) -> None: - """Remove entity reference for given entity_id if found.""" - if entity.zha_device.ieee in self.device_registry: - entity_refs = self.device_registry.get(entity.zha_device.ieee) - self.device_registry[entity.zha_device.ieee] = [ - e - for e in entity_refs # type: ignore[union-attr] - if e.reference_id != entity.entity_id - ] - - def _cleanup_group_entity_registry_entries( - self, zigpy_group: zigpy.group.Group - ) -> None: - """Remove entity registry entries for group entities when the groups are removed from HA.""" - # first we collect the potential unique ids for entities that could be created from this group - possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" - for domain in GROUP_ENTITY_DOMAINS - ] - - # then we get all group entity entries tied to the coordinator - entity_registry = er.async_get(self.hass) - assert self.coordinator_zha_device - all_group_entity_entries = er.async_entries_for_device( - entity_registry, - self.coordinator_zha_device.device_id, - include_disabled_entities=True, - ) - - # then we get the entity entries for this specific group - # by getting the entries that match - entries_to_remove = [ - entry - for entry in all_group_entity_entries - if entry.unique_id in possible_entity_unique_ids - ] - - # then we remove the entries from the entity registry - for entry in entries_to_remove: - _LOGGER.debug( - "cleaning up entity registry entry for entity: %s", entry.entity_id - 
) - entity_registry.async_remove(entry.entity_id) - - @property - def state(self) -> State: - """Return the active coordinator's network state.""" - return self.application_controller.state - - @property - def devices(self) -> dict[EUI64, ZHADevice]: - """Return devices.""" - return self._devices - - @property - def groups(self) -> dict[int, ZHAGroup]: - """Return groups.""" - return self._groups - - @property - def device_registry(self) -> collections.defaultdict[EUI64, list[EntityReference]]: - """Return entities by ieee.""" - return self._device_registry - - def register_entity_reference( - self, - ieee: EUI64, - reference_id: str, - zha_device: ZHADevice, - cluster_handlers: dict[str, ClusterHandler], - device_info: DeviceInfo, - remove_future: asyncio.Future[Any], - ): - """Record the creation of a hass entity associated with ieee.""" - self._device_registry[ieee].append( - EntityReference( - reference_id=reference_id, - zha_device=zha_device, - cluster_handlers=cluster_handlers, - device_info=device_info, - remove_future=remove_future, - ) - ) - - @callback - def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Enable debug mode for ZHA.""" - self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() - async_set_logger_levels(DEBUG_LEVELS) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - - if filterer: - self._log_relay_handler.addFilter(filterer) - - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).addHandler(self._log_relay_handler) - - self.debug_enabled = True - - @callback - def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: - """Disable debug mode for ZHA.""" - async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) - self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() - for logger_name in DEBUG_RELAY_LOGGERS: - logging.getLogger(logger_name).removeHandler(self._log_relay_handler) - if filterer: - self._log_relay_handler.removeFilter(filterer) - self.debug_enabled = False - - @callback - def _async_get_or_create_device( - self, zigpy_device: zigpy.device.Device - ) -> ZHADevice: - """Get or create a ZHA device.""" - if (zha_device := self._devices.get(zigpy_device.ieee)) is None: - zha_device = ZHADevice.new(self.hass, zigpy_device, self) - self._devices[zigpy_device.ieee] = zha_device - - device_registry = dr.async_get(self.hass) - device_registry_device = device_registry.async_get_or_create( - config_entry_id=self.config_entry.entry_id, - connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, - identifiers={(DOMAIN, str(zha_device.ieee))}, - name=zha_device.name, - manufacturer=zha_device.manufacturer, - model=zha_device.model, - ) - zha_device.set_device_id(device_registry_device.id) - return zha_device - - @callback - def _async_get_or_create_group(self, zigpy_group: zigpy.group.Group) -> ZHAGroup: - """Get or create a ZHA group.""" - zha_group = self._groups.get(zigpy_group.group_id) - if zha_group is None: - zha_group = ZHAGroup(self.hass, self, zigpy_group) - self._groups[zigpy_group.group_id] = zha_group - return zha_group - - @callback - def async_update_device( - self, sender: zigpy.device.Device, available: bool = True - ) -> None: - """Update device that has just become available.""" - if sender.ieee in self.devices: - device = self.devices[sender.ieee] - # avoid a race condition during new joins - if device.status is DeviceStatus.INITIALIZED: - device.update_available(available) - - async def 
async_device_initialized(self, device: zigpy.device.Device) -> None: - """Handle device joined and basic information discovered (async).""" - zha_device = self._async_get_or_create_device(device) - _LOGGER.debug( - "device - %s:%s entering async_device_initialized - is_new_join: %s", - device.nwk, - device.ieee, - zha_device.status is not DeviceStatus.INITIALIZED, - ) - - if zha_device.status is DeviceStatus.INITIALIZED: - # ZHA already has an initialized device so either the device was assigned a - # new nwk or device was physically reset and added again without being removed - _LOGGER.debug( - "device - %s:%s has been reset and re-added or its nwk address changed", - device.nwk, - device.ieee, - ) - await self._async_device_rejoined(zha_device) - else: - _LOGGER.debug( - "device - %s:%s has joined the ZHA zigbee network", - device.nwk, - device.ieee, - ) - await self._async_device_joined(zha_device) - - device_info = zha_device.zha_device_info - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.INITIALIZED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - - async def _async_device_joined(self, zha_device: ZHADevice) -> None: - zha_device.available = True - device_info = zha_device.device_info - await zha_device.async_configure() - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - await zha_device.async_initialize(from_cache=False) - async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) - - async def _async_device_rejoined(self, zha_device: ZHADevice) -> None: - _LOGGER.debug( - "skipping discovery for previously discovered device - %s:%s", - zha_device.nwk, - zha_device.ieee, - ) - # we don't have to do this on a nwk swap - # but we don't have a way to tell currently - await zha_device.async_configure() - device_info = zha_device.device_info - device_info[DEVICE_PAIRING_STATUS] = DevicePairingStatus.CONFIGURED.name - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - { - ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, - ZHA_GW_MSG_DEVICE_INFO: device_info, - }, - ) - # force async_initialize() to fire so don't explicitly call it - zha_device.available = False - zha_device.update_available(True) - - async def async_create_zigpy_group( - self, - name: str, - members: list[GroupMember] | None, - group_id: int | None = None, - ) -> ZHAGroup | None: - """Create a new Zigpy Zigbee group.""" - - # we start with two to fill any gaps from a user removing existing groups - - if group_id is None: - group_id = 2 - while group_id in self.groups: - group_id += 1 - - # guard against group already existing - if self.async_get_group_by_name(name) is None: - self.application_controller.groups.add_group(group_id, name) - if members is not None: - tasks = [] - for member in members: - _LOGGER.debug( - ( - "Adding member with IEEE: %s and endpoint ID: %s to group:" - " %s:0x%04x" - ), - member.ieee, - member.endpoint_id, - name, - group_id, - ) - tasks.append( - self.devices[member.ieee].async_add_endpoint_to_group( - member.endpoint_id, group_id - ) - ) - await asyncio.gather(*tasks) - return self.groups.get(group_id) - - async def async_remove_zigpy_group(self, group_id: int) -> None: - """Remove a Zigbee group from Zigpy.""" - if not (group := self.groups.get(group_id)): - _LOGGER.debug("Group: 0x%04x could not be found", group_id) - 
return - if group.members: - tasks = [member.async_remove_from_group() for member in group.members] - if tasks: - await asyncio.gather(*tasks) - self.application_controller.groups.pop(group_id) - - async def shutdown(self) -> None: - """Stop ZHA Controller Application.""" - if self.shutting_down: - _LOGGER.debug("Ignoring duplicate shutdown event") - return - - _LOGGER.debug("Shutting down ZHA ControllerApplication") - self.shutting_down = True - - for unsubscribe in self._unsubs: - unsubscribe() - for device in self.devices.values(): - device.async_cleanup_handles() - await self.application_controller.shutdown() - - def handle_message( - self, - sender: zigpy.device.Device, - profile: int, - cluster: int, - src_ep: int, - dst_ep: int, - message: bytes, - ) -> None: - """Handle message from a device Event handler.""" - if sender.ieee in self.devices and not self.devices[sender.ieee].available: - self.async_update_device(sender, available=True) - - -@callback -def async_capture_log_levels() -> dict[str, int]: - """Capture current logger levels for ZHA.""" - return { - DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), - DEBUG_COMP_ZHA: logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), - DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( - DEBUG_COMP_ZIGPY_ZNP - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( - DEBUG_COMP_ZIGPY_DECONZ - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( - DEBUG_COMP_ZIGPY_XBEE - ).getEffectiveLevel(), - DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( - DEBUG_COMP_ZIGPY_ZIGATE - ).getEffectiveLevel(), - } - - -@callback -def async_set_logger_levels(levels: dict[str, int]) -> None: - """Set logger levels for ZHA.""" - logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) - logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) - logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) - logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) - logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) - logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) - - -class LogRelayHandler(logging.Handler): - """Log handler for error messages.""" - - def __init__(self, hass: HomeAssistant, gateway: ZHAGateway) -> None: - """Initialize a new LogErrorHandler.""" - super().__init__() - self.hass = hass - self.gateway = gateway - hass_path: str = HOMEASSISTANT_PATH[0] - config_dir = self.hass.config.config_dir - self.paths_re = re.compile( - r"(?:{})/(.*)".format( - "|".join([re.escape(x) for x in (hass_path, config_dir)]) - ) - ) - - def emit(self, record: LogRecord) -> None: - """Relay log message via dispatcher.""" - entry = LogEntry( - record, - self.paths_re, - formatter=self.formatter, - figure_out_source=record.levelno >= logging.WARNING, - ) - async_dispatcher_send( - self.hass, - ZHA_GW_MSG, - {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, - ) diff --git a/homeassistant/components/zha/core/group.py b/homeassistant/components/zha/core/group.py deleted file mode 100644 index a6156ab63b7..00000000000 --- a/homeassistant/components/zha/core/group.py +++ /dev/null @@ -1,246 +0,0 @@ -"""Group for Zigbee Home Automation.""" - -from __future__ import annotations - -import asyncio -import logging -from 
typing import TYPE_CHECKING, Any, NamedTuple - -import zigpy.endpoint -import zigpy.exceptions -import zigpy.group -from zigpy.types.named import EUI64 - -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_registry import async_entries_for_device - -from .helpers import LogMixin - -if TYPE_CHECKING: - from .device import ZHADevice - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) - - -class GroupMember(NamedTuple): - """Describes a group member.""" - - ieee: EUI64 - endpoint_id: int - - -class GroupEntityReference(NamedTuple): - """Reference to a group entity.""" - - name: str | None - original_name: str | None - entity_id: int - - -class ZHAGroupMember(LogMixin): - """Composite object that represents a device endpoint in a Zigbee group.""" - - def __init__( - self, zha_group: ZHAGroup, zha_device: ZHADevice, endpoint_id: int - ) -> None: - """Initialize the group member.""" - self._zha_group = zha_group - self._zha_device = zha_device - self._endpoint_id = endpoint_id - - @property - def group(self) -> ZHAGroup: - """Return the group this member belongs to.""" - return self._zha_group - - @property - def endpoint_id(self) -> int: - """Return the endpoint id for this group member.""" - return self._endpoint_id - - @property - def endpoint(self) -> zigpy.endpoint.Endpoint: - """Return the endpoint for this group member.""" - return self._zha_device.device.endpoints.get(self.endpoint_id) - - @property - def device(self) -> ZHADevice: - """Return the ZHA device for this group member.""" - return self._zha_device - - @property - def member_info(self) -> dict[str, Any]: - """Get ZHA group info.""" - member_info: dict[str, Any] = {} - member_info["endpoint_id"] = self.endpoint_id - member_info["device"] = self.device.zha_device_info - member_info["entities"] = self.associated_entities - return member_info - - @property - def associated_entities(self) -> list[dict[str, Any]]: - """Return the list of entities that were derived from this endpoint.""" - entity_registry = er.async_get(self._zha_device.hass) - zha_device_registry = self.device.gateway.device_registry - - entity_info = [] - - for entity_ref in zha_device_registry.get(self.device.ieee): - # We have device entities now that don't leverage cluster handlers - if not entity_ref.cluster_handlers: - continue - entity = entity_registry.async_get(entity_ref.reference_id) - handler = list(entity_ref.cluster_handlers.values())[0] - - if ( - entity is None - or handler.cluster.endpoint.endpoint_id != self.endpoint_id - ): - continue - - entity_info.append( - GroupEntityReference( - name=entity.name, - original_name=entity.original_name, - entity_id=entity_ref.reference_id, - )._asdict() - ) - - return entity_info - - async def async_remove_from_group(self) -> None: - """Remove the device endpoint from the provided zigbee group.""" - try: - await self._zha_device.device.endpoints[ - self._endpoint_id - ].remove_from_group(self._zha_group.group_id) - except (zigpy.exceptions.ZigbeeException, TimeoutError) as ex: - self.debug( - ( - "Failed to remove endpoint: %s for device '%s' from group: 0x%04x" - " ex: %s" - ), - self._endpoint_id, - self._zha_device.ieee, - self._zha_group.group_id, - str(ex), - ) - - def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (f"0x{self._zha_group.group_id:04x}", self.endpoint_id, *args) - _LOGGER.log(level, msg, *args, **kwargs) - - 
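ZHAGroupMember.log above and ZHAGroup.log below both reuse the LogMixin pattern defined in the helpers module removed later in this diff: level-specific wrappers funnel into a single log() that prepends identifying context to the printf-style arguments. A minimal, runnable sketch of that pattern follows; FakeGroupMember and its ids are invented purely for illustration and are not part of the removed code.

import logging

_LOGGER = logging.getLogger(__name__)


class LogMixin:
    """Level-specific wrappers that all funnel through log()."""

    def log(self, level: int, msg: str, *args, **kwargs) -> None:
        raise NotImplementedError

    def debug(self, msg: str, *args, **kwargs) -> None:
        self.log(logging.DEBUG, msg, *args, **kwargs)

    def info(self, msg: str, *args, **kwargs) -> None:
        self.log(logging.INFO, msg, *args, **kwargs)


class FakeGroupMember(LogMixin):
    """Hypothetical stand-in for ZHAGroupMember, for illustration only."""

    def __init__(self, group_id: int, endpoint_id: int) -> None:
        self.group_id = group_id
        self.endpoint_id = endpoint_id

    def log(self, level: int, msg: str, *args, **kwargs) -> None:
        # Same trick as the removed code: fold the group id and endpoint id
        # into the format string so every record carries its context.
        msg = f"[%s](%s): {msg}"
        args = (f"0x{self.group_id:04x}", self.endpoint_id, *args)
        _LOGGER.log(level, msg, *args, **kwargs)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    FakeGroupMember(0x0002, 1).debug("member removed from group %s", "0x0002")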
-class ZHAGroup(LogMixin): - """ZHA Zigbee group object.""" - - def __init__( - self, - hass: HomeAssistant, - zha_gateway: ZHAGateway, - zigpy_group: zigpy.group.Group, - ) -> None: - """Initialize the group.""" - self.hass = hass - self._zha_gateway = zha_gateway - self._zigpy_group = zigpy_group - - @property - def name(self) -> str: - """Return group name.""" - return self._zigpy_group.name - - @property - def group_id(self) -> int: - """Return group name.""" - return self._zigpy_group.group_id - - @property - def endpoint(self) -> zigpy.endpoint.Endpoint: - """Return the endpoint for this group.""" - return self._zigpy_group.endpoint - - @property - def members(self) -> list[ZHAGroupMember]: - """Return the ZHA devices that are members of this group.""" - return [ - ZHAGroupMember(self, self._zha_gateway.devices[member_ieee], endpoint_id) - for (member_ieee, endpoint_id) in self._zigpy_group.members - if member_ieee in self._zha_gateway.devices - ] - - async def async_add_members(self, members: list[GroupMember]) -> None: - """Add members to this group.""" - if len(members) > 1: - tasks = [ - self._zha_gateway.devices[member.ieee].async_add_endpoint_to_group( - member.endpoint_id, self.group_id - ) - for member in members - ] - await asyncio.gather(*tasks) - else: - await self._zha_gateway.devices[ - members[0].ieee - ].async_add_endpoint_to_group(members[0].endpoint_id, self.group_id) - - async def async_remove_members(self, members: list[GroupMember]) -> None: - """Remove members from this group.""" - if len(members) > 1: - tasks = [ - self._zha_gateway.devices[member.ieee].async_remove_endpoint_from_group( - member.endpoint_id, self.group_id - ) - for member in members - ] - await asyncio.gather(*tasks) - else: - await self._zha_gateway.devices[ - members[0].ieee - ].async_remove_endpoint_from_group(members[0].endpoint_id, self.group_id) - - @property - def member_entity_ids(self) -> list[str]: - """Return the ZHA entity ids for all entities for the members of this group.""" - return [ - entity_reference["entity_id"] - for member in self.members - for entity_reference in member.associated_entities - ] - - def get_domain_entity_ids(self, domain: str) -> list[str]: - """Return entity ids from the entity domain for this group.""" - entity_registry = er.async_get(self.hass) - domain_entity_ids: list[str] = [] - - for member in self.members: - if member.device.is_coordinator: - continue - entities = async_entries_for_device( - entity_registry, - member.device.device_id, - include_disabled_entities=True, - ) - domain_entity_ids.extend( - [entity.entity_id for entity in entities if entity.domain == domain] - ) - return domain_entity_ids - - @property - def group_info(self) -> dict[str, Any]: - """Get ZHA group info.""" - group_info: dict[str, Any] = {} - group_info["group_id"] = self.group_id - group_info["name"] = self.name - group_info["members"] = [member.member_info for member in self.members] - return group_info - - def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: - """Log a message.""" - msg = f"[%s](%s): {msg}" - args = (self.name, self.group_id, *args) - _LOGGER.log(level, msg, *args, **kwargs) diff --git a/homeassistant/components/zha/core/helpers.py b/homeassistant/components/zha/core/helpers.py deleted file mode 100644 index 2508dd34fd4..00000000000 --- a/homeassistant/components/zha/core/helpers.py +++ /dev/null @@ -1,523 +0,0 @@ -"""Helpers for Zigbee Home Automation. 
- -For more details about this component, please refer to the documentation at -https://home-assistant.io/integrations/zha/ -""" - -from __future__ import annotations - -import binascii -import collections -from collections.abc import Callable, Iterator -import dataclasses -from dataclasses import dataclass -import enum -import logging -import re -from typing import TYPE_CHECKING, Any, overload - -import voluptuous as vol -import zigpy.exceptions -import zigpy.types -import zigpy.util -import zigpy.zcl -from zigpy.zcl.foundation import CommandSchema -import zigpy.zdo.types as zdo_types - -from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.components.number import NumberDeviceClass -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - Platform, - UnitOfApparentPower, - UnitOfDataRate, - UnitOfElectricCurrent, - UnitOfElectricPotential, - UnitOfEnergy, - UnitOfFrequency, - UnitOfInformation, - UnitOfIrradiance, - UnitOfLength, - UnitOfMass, - UnitOfPower, - UnitOfPrecipitationDepth, - UnitOfPressure, - UnitOfSoundPressure, - UnitOfSpeed, - UnitOfTemperature, - UnitOfTime, - UnitOfVolume, - UnitOfVolumeFlowRate, - UnitOfVolumetricFlux, -) -from homeassistant.core import HomeAssistant, State, callback -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.typing import ConfigType - -from .const import CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, CUSTOM_CONFIGURATION, DATA_ZHA -from .registries import BINDABLE_CLUSTERS - -if TYPE_CHECKING: - from .device import ZHADevice - from .gateway import ZHAGateway - -_LOGGER = logging.getLogger(__name__) - - -@dataclass -class BindingPair: - """Information for binding.""" - - source_cluster: zigpy.zcl.Cluster - target_ieee: zigpy.types.EUI64 - target_ep_id: int - - @property - def destination_address(self) -> zdo_types.MultiAddress: - """Return a ZDO multi address instance.""" - return zdo_types.MultiAddress( - addrmode=3, ieee=self.target_ieee, endpoint=self.target_ep_id - ) - - -async def safe_read( - cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None -): - """Swallow all exceptions from network read. - - If we throw during initialization, setup fails. Rather have an entity that - exists, but is in a maybe wrong state, than no entity. This method should - probably only be used during initialization. 
- """ - try: - result, _ = await cluster.read_attributes( - attributes, - allow_cache=allow_cache, - only_cache=only_cache, - manufacturer=manufacturer, - ) - except Exception: # noqa: BLE001 - return {} - return result - - -async def get_matched_clusters( - source_zha_device: ZHADevice, target_zha_device: ZHADevice -) -> list[BindingPair]: - """Get matched input/output cluster pairs for 2 devices.""" - source_clusters = source_zha_device.async_get_std_clusters() - target_clusters = target_zha_device.async_get_std_clusters() - clusters_to_bind = [] - - for endpoint_id in source_clusters: - for cluster_id in source_clusters[endpoint_id][CLUSTER_TYPE_OUT]: - if cluster_id not in BINDABLE_CLUSTERS: - continue - if target_zha_device.nwk == 0x0000: - cluster_pair = BindingPair( - source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ - cluster_id - ], - target_ieee=target_zha_device.ieee, - target_ep_id=target_zha_device.device.application.get_endpoint_id( - cluster_id, is_server_cluster=True - ), - ) - clusters_to_bind.append(cluster_pair) - continue - for t_endpoint_id in target_clusters: - if cluster_id in target_clusters[t_endpoint_id][CLUSTER_TYPE_IN]: - cluster_pair = BindingPair( - source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][ - cluster_id - ], - target_ieee=target_zha_device.ieee, - target_ep_id=t_endpoint_id, - ) - clusters_to_bind.append(cluster_pair) - return clusters_to_bind - - -def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: - """Convert a cluster command schema to a voluptuous schema.""" - return vol.Schema( - { - vol.Optional(field.name) - if field.optional - else vol.Required(field.name): schema_type_to_vol(field.type) - for field in schema.fields - } - ) - - -def schema_type_to_vol(field_type: Any) -> Any: - """Convert a schema type to a voluptuous type.""" - if issubclass(field_type, enum.Flag) and field_type.__members__: - return cv.multi_select( - [key.replace("_", " ") for key in field_type.__members__] - ) - if issubclass(field_type, enum.Enum) and field_type.__members__: - return vol.In([key.replace("_", " ") for key in field_type.__members__]) - if ( - issubclass(field_type, zigpy.types.FixedIntType) - or issubclass(field_type, enum.Flag) - or issubclass(field_type, enum.Enum) - ): - return vol.All( - vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) - ) - return str - - -def convert_to_zcl_values( - fields: dict[str, Any], schema: CommandSchema -) -> dict[str, Any]: - """Convert user input to ZCL values.""" - converted_fields: dict[str, Any] = {} - for field in schema.fields: - if field.name not in fields: - continue - value = fields[field.name] - if issubclass(field.type, enum.Flag) and isinstance(value, list): - new_value = 0 - - for flag in value: - if isinstance(flag, str): - new_value |= field.type[flag.replace(" ", "_")] - else: - new_value |= flag - - value = field.type(new_value) - elif issubclass(field.type, enum.Enum): - value = ( - field.type[value.replace(" ", "_")] - if isinstance(value, str) - else field.type(value) - ) - else: - value = field.type(value) - _LOGGER.debug( - "Converted ZCL schema field(%s) value from: %s to: %s", - field.name, - fields[field.name], - value, - ) - converted_fields[field.name] = value - return converted_fields - - -@callback -def async_is_bindable_target(source_zha_device, target_zha_device): - """Determine if target is bindable to source.""" - if target_zha_device.nwk == 0x0000: - return True - - source_clusters = 
source_zha_device.async_get_std_clusters() - target_clusters = target_zha_device.async_get_std_clusters() - - for endpoint_id in source_clusters: - for t_endpoint_id in target_clusters: - matches = set( - source_clusters[endpoint_id][CLUSTER_TYPE_OUT].keys() - ).intersection(target_clusters[t_endpoint_id][CLUSTER_TYPE_IN].keys()) - if any(bindable in BINDABLE_CLUSTERS for bindable in matches): - return True - return False - - -@callback -def async_get_zha_config_value[_T]( - config_entry: ConfigEntry, section: str, config_key: str, default: _T -) -> _T: - """Get the value for the specified configuration from the ZHA config entry.""" - return ( - config_entry.options.get(CUSTOM_CONFIGURATION, {}) - .get(section, {}) - .get(config_key, default) - ) - - -def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True): - """Determine if a device containing the specified in cluster is paired.""" - zha_gateway = get_zha_gateway(hass) - zha_devices = zha_gateway.devices.values() - for zha_device in zha_devices: - if skip_coordinator and zha_device.is_coordinator: - continue - clusters_by_endpoint = zha_device.async_get_clusters() - for clusters in clusters_by_endpoint.values(): - if ( - cluster_id in clusters[CLUSTER_TYPE_IN] - or cluster_id in clusters[CLUSTER_TYPE_OUT] - ): - return True - return False - - -@callback -def async_get_zha_device(hass: HomeAssistant, device_id: str) -> ZHADevice: - """Get a ZHA device for the given device registry id.""" - device_registry = dr.async_get(hass) - registry_device = device_registry.async_get(device_id) - if not registry_device: - _LOGGER.error("Device id `%s` not found in registry", device_id) - raise KeyError(f"Device id `{device_id}` not found in registry.") - zha_gateway = get_zha_gateway(hass) - try: - ieee_address = list(registry_device.identifiers)[0][1] - ieee = zigpy.types.EUI64.convert(ieee_address) - except (IndexError, ValueError) as ex: - _LOGGER.error( - "Unable to determine device IEEE for device with device id `%s`", device_id - ) - raise KeyError( - f"Unable to determine device IEEE for device with device id `{device_id}`." - ) from ex - return zha_gateway.devices[ieee] - - -def find_state_attributes(states: list[State], key: str) -> Iterator[Any]: - """Find attributes with matching key from states.""" - for state in states: - if (value := state.attributes.get(key)) is not None: - yield value - - -def mean_int(*args): - """Return the mean of the supplied values.""" - return int(sum(args) / len(args)) - - -def mean_tuple(*args): - """Return the mean values along the columns of the supplied values.""" - return tuple(sum(x) / len(x) for x in zip(*args, strict=False)) - - -def reduce_attribute( - states: list[State], - key: str, - default: Any | None = None, - reduce: Callable[..., Any] = mean_int, -) -> Any: - """Find the first attribute matching key from states. - - If none are found, return default. 
- """ - attrs = list(find_state_attributes(states, key)) - - if not attrs: - return default - - if len(attrs) == 1: - return attrs[0] - - return reduce(*attrs) - - -class LogMixin: - """Log helper.""" - - def log(self, level, msg, *args, **kwargs): - """Log with level.""" - raise NotImplementedError - - def debug(self, msg, *args, **kwargs): - """Debug level log.""" - return self.log(logging.DEBUG, msg, *args, **kwargs) - - def info(self, msg, *args, **kwargs): - """Info level log.""" - return self.log(logging.INFO, msg, *args, **kwargs) - - def warning(self, msg, *args, **kwargs): - """Warning method log.""" - return self.log(logging.WARNING, msg, *args, **kwargs) - - def error(self, msg, *args, **kwargs): - """Error level log.""" - return self.log(logging.ERROR, msg, *args, **kwargs) - - -def convert_install_code(value: str) -> zigpy.types.KeyData: - """Convert string to install code bytes and validate length.""" - - try: - code = binascii.unhexlify(value.replace("-", "").lower()) - except binascii.Error as exc: - raise vol.Invalid(f"invalid hex string: {value}") from exc - - if len(code) != 18: # 16 byte code + 2 crc bytes - raise vol.Invalid("invalid length of the install code") - - link_key = zigpy.util.convert_install_code(code) - if link_key is None: - raise vol.Invalid("invalid install code") - - return link_key - - -QR_CODES = ( - # Consciot - r"^([\da-fA-F]{16})\|([\da-fA-F]{36})$", - # Enbrighten - r""" - ^Z: - ([0-9a-fA-F]{16}) # IEEE address - \$I: - ([0-9a-fA-F]{36}) # install code - $ - """, - # Aqara - r""" - \$A: - ([0-9a-fA-F]{16}) # IEEE address - \$I: - ([0-9a-fA-F]{36}) # install code - $ - """, - # Bosch - r""" - ^RB01SG - [0-9a-fA-F]{34} - ([0-9a-fA-F]{16}) # IEEE address - DLK - ([0-9a-fA-F]{36}|[0-9a-fA-F]{32}) # install code / link key - $ - """, -) - - -def qr_to_install_code(qr_code: str) -> tuple[zigpy.types.EUI64, zigpy.types.KeyData]: - """Try to parse the QR code. - - if successful, return a tuple of a EUI64 address and install code. 
- """ - - for code_pattern in QR_CODES: - match = re.search(code_pattern, qr_code, re.VERBOSE) - if match is None: - continue - - ieee_hex = binascii.unhexlify(match[1]) - ieee = zigpy.types.EUI64(ieee_hex[::-1]) - - # Bosch supplies (A) device specific link key (DSLK) or (A) install code + crc - if "RB01SG" in code_pattern and len(match[2]) == 32: - link_key_hex = binascii.unhexlify(match[2]) - link_key = zigpy.types.KeyData(link_key_hex) - return ieee, link_key - install_code = match[2] - # install_code sanity check - link_key = convert_install_code(install_code) - return ieee, link_key - - raise vol.Invalid(f"couldn't convert qr code: {qr_code}") - - -@dataclasses.dataclass(kw_only=True, slots=True) -class ZHAData: - """ZHA component data stored in `hass.data`.""" - - yaml_config: ConfigType = dataclasses.field(default_factory=dict) - platforms: collections.defaultdict[Platform, list] = dataclasses.field( - default_factory=lambda: collections.defaultdict(list) - ) - gateway: ZHAGateway | None = dataclasses.field(default=None) - device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( - default_factory=dict - ) - allow_polling: bool = dataclasses.field(default=False) - - -def get_zha_data(hass: HomeAssistant) -> ZHAData: - """Get the global ZHA data object.""" - if DATA_ZHA not in hass.data: - hass.data[DATA_ZHA] = ZHAData() - - return hass.data[DATA_ZHA] - - -def get_zha_gateway(hass: HomeAssistant) -> ZHAGateway: - """Get the ZHA gateway object.""" - if (zha_gateway := get_zha_data(hass).gateway) is None: - raise ValueError("No gateway object exists") - - return zha_gateway - - -UNITS_OF_MEASURE = { - UnitOfApparentPower.__name__: UnitOfApparentPower, - UnitOfPower.__name__: UnitOfPower, - UnitOfEnergy.__name__: UnitOfEnergy, - UnitOfElectricCurrent.__name__: UnitOfElectricCurrent, - UnitOfElectricPotential.__name__: UnitOfElectricPotential, - UnitOfTemperature.__name__: UnitOfTemperature, - UnitOfTime.__name__: UnitOfTime, - UnitOfLength.__name__: UnitOfLength, - UnitOfFrequency.__name__: UnitOfFrequency, - UnitOfPressure.__name__: UnitOfPressure, - UnitOfSoundPressure.__name__: UnitOfSoundPressure, - UnitOfVolume.__name__: UnitOfVolume, - UnitOfVolumeFlowRate.__name__: UnitOfVolumeFlowRate, - UnitOfMass.__name__: UnitOfMass, - UnitOfIrradiance.__name__: UnitOfIrradiance, - UnitOfVolumetricFlux.__name__: UnitOfVolumetricFlux, - UnitOfPrecipitationDepth.__name__: UnitOfPrecipitationDepth, - UnitOfSpeed.__name__: UnitOfSpeed, - UnitOfInformation.__name__: UnitOfInformation, - UnitOfDataRate.__name__: UnitOfDataRate, -} - - -def validate_unit(quirks_unit: enum.Enum) -> enum.Enum: - """Validate and return a unit of measure.""" - return UNITS_OF_MEASURE[type(quirks_unit).__name__](quirks_unit.value) - - -@overload -def validate_device_class( - device_class_enum: type[BinarySensorDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> BinarySensorDeviceClass | None: ... - - -@overload -def validate_device_class( - device_class_enum: type[SensorDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> SensorDeviceClass | None: ... - - -@overload -def validate_device_class( - device_class_enum: type[NumberDeviceClass], - metadata_value, - platform: str, - logger: logging.Logger, -) -> NumberDeviceClass | None: ... 
- - -def validate_device_class( - device_class_enum: type[ - BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass - ], - metadata_value: enum.Enum, - platform: str, - logger: logging.Logger, -) -> BinarySensorDeviceClass | SensorDeviceClass | NumberDeviceClass | None: - """Validate and return a device class.""" - try: - return device_class_enum(metadata_value.value) - except ValueError as ex: - logger.warning( - "Quirks provided an invalid device class: %s for platform %s: %s", - metadata_value, - platform, - ex, - ) - return None diff --git a/homeassistant/components/zha/core/registries.py b/homeassistant/components/zha/core/registries.py deleted file mode 100644 index 9d23b77efaa..00000000000 --- a/homeassistant/components/zha/core/registries.py +++ /dev/null @@ -1,516 +0,0 @@ -"""Mapping registries for Zigbee Home Automation.""" - -from __future__ import annotations - -import collections -from collections.abc import Callable -import dataclasses -from operator import attrgetter -from typing import TYPE_CHECKING - -import attr -from zigpy import zcl -import zigpy.profiles.zha -import zigpy.profiles.zll -from zigpy.types.named import EUI64 - -from homeassistant.const import Platform - -from .decorators import DictRegistry, NestedDictRegistry, SetRegistry - -if TYPE_CHECKING: - from ..entity import ZhaEntity, ZhaGroupEntity - from .cluster_handlers import ClientClusterHandler, ClusterHandler - - -GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] - -IKEA_AIR_PURIFIER_CLUSTER = 0xFC7D -PHILLIPS_REMOTE_CLUSTER = 0xFC00 -SMARTTHINGS_ACCELERATION_CLUSTER = 0xFC02 -SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE = 0x8000 -SMARTTHINGS_HUMIDITY_CLUSTER = 0xFC45 -TUYA_MANUFACTURER_CLUSTER = 0xEF00 -VOC_LEVEL_CLUSTER = 0x042E - -REMOTE_DEVICE_TYPES = { - zigpy.profiles.zha.PROFILE_ID: [ - zigpy.profiles.zha.DeviceType.COLOR_CONTROLLER, - zigpy.profiles.zha.DeviceType.COLOR_DIMMER_SWITCH, - zigpy.profiles.zha.DeviceType.COLOR_SCENE_CONTROLLER, - zigpy.profiles.zha.DeviceType.DIMMER_SWITCH, - zigpy.profiles.zha.DeviceType.LEVEL_CONTROL_SWITCH, - zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - zigpy.profiles.zha.DeviceType.NON_COLOR_SCENE_CONTROLLER, - zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT_SWITCH, - zigpy.profiles.zha.DeviceType.REMOTE_CONTROL, - zigpy.profiles.zha.DeviceType.SCENE_SELECTOR, - ], - zigpy.profiles.zll.PROFILE_ID: [ - zigpy.profiles.zll.DeviceType.COLOR_CONTROLLER, - zigpy.profiles.zll.DeviceType.COLOR_SCENE_CONTROLLER, - zigpy.profiles.zll.DeviceType.CONTROL_BRIDGE, - zigpy.profiles.zll.DeviceType.CONTROLLER, - zigpy.profiles.zll.DeviceType.SCENE_CONTROLLER, - ], -} -REMOTE_DEVICE_TYPES = collections.defaultdict(list, REMOTE_DEVICE_TYPES) - -SINGLE_INPUT_CLUSTER_DEVICE_CLASS = { - # this works for now but if we hit conflicts we can break it out to - # a different dict that is keyed by manufacturer - zcl.clusters.general.AnalogOutput.cluster_id: Platform.NUMBER, - zcl.clusters.general.MultistateInput.cluster_id: Platform.SENSOR, - zcl.clusters.general.OnOff.cluster_id: Platform.SWITCH, - zcl.clusters.hvac.Fan.cluster_id: Platform.FAN, -} - -SINGLE_OUTPUT_CLUSTER_DEVICE_CLASS = { - zcl.clusters.general.OnOff.cluster_id: Platform.BINARY_SENSOR, - zcl.clusters.security.IasAce.cluster_id: Platform.ALARM_CONTROL_PANEL, -} - -BINDABLE_CLUSTERS = SetRegistry() - -DEVICE_CLASS = { - zigpy.profiles.zha.PROFILE_ID: { - SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE: Platform.DEVICE_TRACKER, - 
zigpy.profiles.zha.DeviceType.THERMOSTAT: Platform.CLIMATE, - zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.DIMMABLE_BALLAST: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.DIMMABLE_PLUG_IN_UNIT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT: Platform.COVER, - zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, - zigpy.profiles.zha.DeviceType.ON_OFF_PLUG_IN_UNIT: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.SHADE: Platform.COVER, - zigpy.profiles.zha.DeviceType.SMART_PLUG: Platform.SWITCH, - zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL: Platform.ALARM_CONTROL_PANEL, - zigpy.profiles.zha.DeviceType.IAS_WARNING_DEVICE: Platform.SIREN, - }, - zigpy.profiles.zll.PROFILE_ID: { - zigpy.profiles.zll.DeviceType.COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.COLOR_TEMPERATURE_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.DIMMABLE_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.DIMMABLE_PLUGIN_UNIT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.EXTENDED_COLOR_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.ON_OFF_LIGHT: Platform.LIGHT, - zigpy.profiles.zll.DeviceType.ON_OFF_PLUGIN_UNIT: Platform.SWITCH, - }, -} -DEVICE_CLASS = collections.defaultdict(dict, DEVICE_CLASS) - -CLUSTER_HANDLER_ONLY_CLUSTERS = SetRegistry() -CLIENT_CLUSTER_HANDLER_REGISTRY: DictRegistry[type[ClientClusterHandler]] = ( - DictRegistry() -) -ZIGBEE_CLUSTER_HANDLER_REGISTRY: NestedDictRegistry[type[ClusterHandler]] = ( - NestedDictRegistry() -) - -WEIGHT_ATTR = attrgetter("weight") - - -def set_or_callable(value) -> frozenset[str] | Callable: - """Convert single str or None to a set. Pass through callables and sets.""" - if value is None: - return frozenset() - if callable(value): - return value - if isinstance(value, (frozenset, set, list)): - return frozenset(value) - return frozenset([str(value)]) - - -def _get_empty_frozenset() -> frozenset[str]: - return frozenset() - - -@attr.s(frozen=True) -class MatchRule: - """Match a ZHA Entity to a cluster handler name or generic id.""" - - cluster_handler_names: frozenset[str] = attr.ib( - factory=frozenset, converter=set_or_callable - ) - generic_ids: frozenset[str] = attr.ib(factory=frozenset, converter=set_or_callable) - manufacturers: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - models: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - aux_cluster_handlers: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - quirk_ids: frozenset[str] | Callable = attr.ib( - factory=_get_empty_frozenset, converter=set_or_callable - ) - - @property - def weight(self) -> int: - """Return the weight of the matching rule. - - More specific matches should be preferred over less specific. Quirk class - matching rules have priority over model matching rules - and have a priority over manufacturer matching rules and rules matching a - single model/manufacturer get a better priority over rules matching multiple - models/manufacturers. And any model or manufacturers matching rules get better - priority over rules matching only cluster handlers. 
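Concretely, the weighting described in this docstring means a rule that also pins a model always outranks a rule that only names cluster handlers; a small sketch of the comparison, where "lumi.plug" and "on_off" are illustrative names only:

generic_rule = MatchRule(cluster_handler_names="on_off")
model_rule = MatchRule(cluster_handler_names="on_off", models="lumi.plug")

# Sorting candidate rules by WEIGHT_ATTR therefore tries model_rule first.
assert model_rule.weight > generic_rule.weight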
- But in case of a cluster handler name/cluster handler id matching, we give rules matching - multiple cluster handlers a better priority over rules matching a single cluster handler. - """ - weight = 0 - if self.quirk_ids: - weight += 501 - (1 if callable(self.quirk_ids) else len(self.quirk_ids)) - - if self.models: - weight += 401 - (1 if callable(self.models) else len(self.models)) - - if self.manufacturers: - weight += 301 - ( - 1 if callable(self.manufacturers) else len(self.manufacturers) - ) - - weight += 10 * len(self.cluster_handler_names) - weight += 5 * len(self.generic_ids) - if isinstance(self.aux_cluster_handlers, frozenset): - weight += 1 * len(self.aux_cluster_handlers) - return weight - - def claim_cluster_handlers( - self, cluster_handlers: list[ClusterHandler] - ) -> list[ClusterHandler]: - """Return a list of cluster handlers this rule matches + aux cluster handlers.""" - claimed = [] - if isinstance(self.cluster_handler_names, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.name in self.cluster_handler_names] - ) - if isinstance(self.generic_ids, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.generic_id in self.generic_ids] - ) - if isinstance(self.aux_cluster_handlers, frozenset): - claimed.extend( - [ch for ch in cluster_handlers if ch.name in self.aux_cluster_handlers] - ) - return claimed - - def strict_matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> bool: - """Return True if this device matches the criteria.""" - return all(self._matched(manufacturer, model, cluster_handlers, quirk_id)) - - def loose_matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> bool: - """Return True if this device matches the criteria.""" - return any(self._matched(manufacturer, model, cluster_handlers, quirk_id)) - - def _matched( - self, - manufacturer: str, - model: str, - cluster_handlers: list, - quirk_id: str | None, - ) -> list: - """Return a list of field matches.""" - if not any(attr.asdict(self).values()): - return [False] - - matches = [] - if self.cluster_handler_names: - cluster_handler_names = {ch.name for ch in cluster_handlers} - matches.append(self.cluster_handler_names.issubset(cluster_handler_names)) - - if self.generic_ids: - all_generic_ids = {ch.generic_id for ch in cluster_handlers} - matches.append(self.generic_ids.issubset(all_generic_ids)) - - if self.manufacturers: - if callable(self.manufacturers): - matches.append(self.manufacturers(manufacturer)) - else: - matches.append(manufacturer in self.manufacturers) - - if self.models: - if callable(self.models): - matches.append(self.models(model)) - else: - matches.append(model in self.models) - - if self.quirk_ids: - if callable(self.quirk_ids): - matches.append(self.quirk_ids(quirk_id)) - else: - matches.append(quirk_id in self.quirk_ids) - - return matches - - -@dataclasses.dataclass -class EntityClassAndClusterHandlers: - """Container for entity class and corresponding cluster handlers.""" - - entity_class: type[ZhaEntity] - claimed_cluster_handlers: list[ClusterHandler] - - -class ZHAEntityRegistry: - """Cluster handler to ZHA Entity mapping.""" - - def __init__(self) -> None: - """Initialize Registry instance.""" - self._strict_registry: dict[Platform, dict[MatchRule, type[ZhaEntity]]] = ( - collections.defaultdict(dict) - ) - self._multi_entity_registry: dict[ - Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] - ] = 
collections.defaultdict( - lambda: collections.defaultdict(lambda: collections.defaultdict(list)) - ) - self._config_diagnostic_entity_registry: dict[ - Platform, dict[int | str | None, dict[MatchRule, list[type[ZhaEntity]]]] - ] = collections.defaultdict( - lambda: collections.defaultdict(lambda: collections.defaultdict(list)) - ) - self._group_registry: dict[str, type[ZhaGroupEntity]] = {} - self.single_device_matches: dict[Platform, dict[EUI64, list[str]]] = ( - collections.defaultdict(lambda: collections.defaultdict(list)) - ) - - def get_entity( - self, - component: Platform, - manufacturer: str, - model: str, - cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - default: type[ZhaEntity] | None = None, - ) -> tuple[type[ZhaEntity] | None, list[ClusterHandler]]: - """Match a ZHA ClusterHandler to a ZHA Entity class.""" - matches = self._strict_registry[component] - for match in sorted(matches, key=WEIGHT_ATTR, reverse=True): - if match.strict_matched(manufacturer, model, cluster_handlers, quirk_id): - claimed = match.claim_cluster_handlers(cluster_handlers) - return self._strict_registry[component][match], claimed - - return default, [] - - def get_multi_entity( - self, - manufacturer: str, - model: str, - cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - ) -> tuple[ - dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] - ]: - """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" - result: dict[Platform, list[EntityClassAndClusterHandlers]] = ( - collections.defaultdict(list) - ) - all_claimed: set[ClusterHandler] = set() - for component, stop_match_groups in self._multi_entity_registry.items(): - for stop_match_grp, matches in stop_match_groups.items(): - sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) - for match in sorted_matches: - if match.strict_matched( - manufacturer, model, cluster_handlers, quirk_id - ): - claimed = match.claim_cluster_handlers(cluster_handlers) - for ent_class in stop_match_groups[stop_match_grp][match]: - ent_n_cluster_handlers = EntityClassAndClusterHandlers( - ent_class, claimed - ) - result[component].append(ent_n_cluster_handlers) - all_claimed |= set(claimed) - if stop_match_grp: - break - - return result, list(all_claimed) - - def get_config_diagnostic_entity( - self, - manufacturer: str, - model: str, - cluster_handlers: list[ClusterHandler], - quirk_id: str | None, - ) -> tuple[ - dict[Platform, list[EntityClassAndClusterHandlers]], list[ClusterHandler] - ]: - """Match ZHA cluster handlers to potentially multiple ZHA Entity classes.""" - result: dict[Platform, list[EntityClassAndClusterHandlers]] = ( - collections.defaultdict(list) - ) - all_claimed: set[ClusterHandler] = set() - for ( - component, - stop_match_groups, - ) in self._config_diagnostic_entity_registry.items(): - for stop_match_grp, matches in stop_match_groups.items(): - sorted_matches = sorted(matches, key=WEIGHT_ATTR, reverse=True) - for match in sorted_matches: - if match.strict_matched( - manufacturer, model, cluster_handlers, quirk_id - ): - claimed = match.claim_cluster_handlers(cluster_handlers) - for ent_class in stop_match_groups[stop_match_grp][match]: - ent_n_cluster_handlers = EntityClassAndClusterHandlers( - ent_class, claimed - ) - result[component].append(ent_n_cluster_handlers) - all_claimed |= set(claimed) - if stop_match_grp: - break - - return result, list(all_claimed) - - def get_group_entity(self, component: str) -> type[ZhaGroupEntity] | None: - """Match a ZHA group to a ZHA 
Entity class.""" - return self._group_registry.get(component) - - def strict_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a strict match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_ent: _ZhaEntityT) -> _ZhaEntityT: - """Register a strict match rule. - - All non-empty fields of a match rule must match. - """ - self._strict_registry[component][rule] = zha_ent - return zha_ent - - return decorator - - def multipass_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - stop_on_match_group: int | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a loose match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: - """Register a loose match rule. - - All non empty fields of a match rule must match. - """ - # group the rules by cluster handlers - self._multi_entity_registry[component][stop_on_match_group][rule].append( - zha_entity - ) - return zha_entity - - return decorator - - def config_diagnostic_match[_ZhaEntityT: type[ZhaEntity]]( - self, - component: Platform, - cluster_handler_names: set[str] | str | None = None, - generic_ids: set[str] | str | None = None, - manufacturers: Callable | set[str] | str | None = None, - models: Callable | set[str] | str | None = None, - aux_cluster_handlers: Callable | set[str] | str | None = None, - stop_on_match_group: int | str | None = None, - quirk_ids: set[str] | str | None = None, - ) -> Callable[[_ZhaEntityT], _ZhaEntityT]: - """Decorate a loose match rule.""" - - rule = MatchRule( - cluster_handler_names, - generic_ids, - manufacturers, - models, - aux_cluster_handlers, - quirk_ids, - ) - - def decorator(zha_entity: _ZhaEntityT) -> _ZhaEntityT: - """Register a loose match rule. - - All non-empty fields of a match rule must match. 
- """ - # group the rules by cluster handlers - self._config_diagnostic_entity_registry[component][stop_on_match_group][ - rule - ].append(zha_entity) - return zha_entity - - return decorator - - def group_match[_ZhaGroupEntityT: type[ZhaGroupEntity]]( - self, component: Platform - ) -> Callable[[_ZhaGroupEntityT], _ZhaGroupEntityT]: - """Decorate a group match rule.""" - - def decorator(zha_ent: _ZhaGroupEntityT) -> _ZhaGroupEntityT: - """Register a group match rule.""" - self._group_registry[component] = zha_ent - return zha_ent - - return decorator - - def prevent_entity_creation(self, platform: Platform, ieee: EUI64, key: str): - """Return True if the entity should not be created.""" - platform_restrictions = self.single_device_matches[platform] - device_restrictions = platform_restrictions[ieee] - if key in device_restrictions: - return True - device_restrictions.append(key) - return False - - def clean_up(self) -> None: - """Clean up post discovery.""" - self.single_device_matches = collections.defaultdict( - lambda: collections.defaultdict(list) - ) - - -ZHA_ENTITIES = ZHAEntityRegistry() diff --git a/homeassistant/components/zha/cover.py b/homeassistant/components/zha/cover.py index 718b6fed3a2..0d6be2dbb35 100644 --- a/homeassistant/components/zha/cover.py +++ b/homeassistant/components/zha/cover.py @@ -2,16 +2,17 @@ from __future__ import annotations -import asyncio +from collections.abc import Mapping import functools import logging -from typing import TYPE_CHECKING, Any, cast +from typing import Any -from zigpy.zcl.clusters.closures import WindowCovering as WindowCoveringCluster -from zigpy.zcl.foundation import Status +from zha.application.platforms.cover import Shade as ZhaShade +from zha.application.platforms.cover.const import ( + CoverEntityFeature as ZHACoverEntityFeature, +) from homeassistant.components.cover import ( - ATTR_CURRENT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, CoverDeviceClass, @@ -19,41 +20,22 @@ from homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - STATE_CLOSED, - STATE_CLOSING, - STATE_OPEN, - STATE_OPENING, - Platform, -) -from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.cluster_handlers.closures import WindowCoveringClusterHandler -from .core.const import ( - CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_SHADE, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - SIGNAL_SET_LEVEL, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.COVER) - async def async_setup_entry( hass: HomeAssistant, @@ -68,421 +50,143 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, 
async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, ZhaCover, entities_to_create ), ) config_entry.async_on_unload(unsub) -WCAttrs = WindowCoveringCluster.AttributeDefs -WCT = WindowCoveringCluster.WindowCoveringType -WCCS = WindowCoveringCluster.ConfigStatus - -ZCL_TO_COVER_DEVICE_CLASS = { - WCT.Awning: CoverDeviceClass.AWNING, - WCT.Drapery: CoverDeviceClass.CURTAIN, - WCT.Projector_screen: CoverDeviceClass.SHADE, - WCT.Rollershade: CoverDeviceClass.SHADE, - WCT.Rollershade_two_motors: CoverDeviceClass.SHADE, - WCT.Rollershade_exterior: CoverDeviceClass.SHADE, - WCT.Rollershade_exterior_two_motors: CoverDeviceClass.SHADE, - WCT.Shutter: CoverDeviceClass.SHUTTER, - WCT.Tilt_blind_tilt_only: CoverDeviceClass.BLIND, - WCT.Tilt_blind_tilt_and_lift: CoverDeviceClass.BLIND, -} - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -class ZhaCover(ZhaEntity, CoverEntity): +class ZhaCover(ZHAEntity, CoverEntity): """Representation of a ZHA cover.""" - _attr_translation_key: str = "cover" + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA cover.""" + super().__init__(entity_data) - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this cover.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COVER) - assert cluster_handler - self._cover_cluster_handler: WindowCoveringClusterHandler = cast( - WindowCoveringClusterHandler, cluster_handler - ) - if self._cover_cluster_handler.window_covering_type: - self._attr_device_class: CoverDeviceClass | None = ( - ZCL_TO_COVER_DEVICE_CLASS.get( - self._cover_cluster_handler.window_covering_type - ) + if self.entity_data.entity.info_object.device_class is not None: + self._attr_device_class = CoverDeviceClass( + self.entity_data.entity.info_object.device_class ) - self._attr_supported_features: CoverEntityFeature = ( - self._determine_supported_features() - ) - self._target_lift_position: int | None = None - self._target_tilt_position: int | None = None - self._determine_initial_state() - def _determine_supported_features(self) -> CoverEntityFeature: - """Determine the supported cover features.""" - supported_features: CoverEntityFeature = ( - CoverEntityFeature.OPEN - | CoverEntityFeature.CLOSE - | CoverEntityFeature.STOP - | CoverEntityFeature.SET_POSITION - ) - if ( - self._cover_cluster_handler.window_covering_type - and self._cover_cluster_handler.window_covering_type - in ( - WCT.Shutter, - WCT.Tilt_blind_tilt_only, - WCT.Tilt_blind_tilt_and_lift, - ) - ): - supported_features |= CoverEntityFeature.SET_TILT_POSITION - supported_features |= CoverEntityFeature.OPEN_TILT - supported_features |= CoverEntityFeature.CLOSE_TILT - supported_features |= CoverEntityFeature.STOP_TILT - return supported_features + features = CoverEntityFeature(0) + zha_features: ZHACoverEntityFeature = self.entity_data.entity.supported_features - def _determine_initial_state(self) -> None: - """Determine the initial state of the cover.""" - if ( - self._cover_cluster_handler.window_covering_type - and self._cover_cluster_handler.window_covering_type - in ( - WCT.Shutter, - WCT.Tilt_blind_tilt_only, - WCT.Tilt_blind_tilt_and_lift, - ) - ): - self._determine_state( - self.current_cover_tilt_position, is_lift_update=False - ) - if ( - self._cover_cluster_handler.window_covering_type - == WCT.Tilt_blind_tilt_and_lift - ): - state = 
self._state - self._determine_state(self.current_cover_position) - if state == STATE_OPEN and self._state == STATE_CLOSED: - # let the tilt state override the lift state - self._state = STATE_OPEN - else: - self._determine_state(self.current_cover_position) + if ZHACoverEntityFeature.OPEN in zha_features: + features |= CoverEntityFeature.OPEN + if ZHACoverEntityFeature.CLOSE in zha_features: + features |= CoverEntityFeature.CLOSE + if ZHACoverEntityFeature.SET_POSITION in zha_features: + features |= CoverEntityFeature.SET_POSITION + if ZHACoverEntityFeature.STOP in zha_features: + features |= CoverEntityFeature.STOP + if ZHACoverEntityFeature.OPEN_TILT in zha_features: + features |= CoverEntityFeature.OPEN_TILT + if ZHACoverEntityFeature.CLOSE_TILT in zha_features: + features |= CoverEntityFeature.CLOSE_TILT + if ZHACoverEntityFeature.STOP_TILT in zha_features: + features |= CoverEntityFeature.STOP_TILT + if ZHACoverEntityFeature.SET_TILT_POSITION in zha_features: + features |= CoverEntityFeature.SET_TILT_POSITION - def _determine_state(self, position_or_tilt, is_lift_update=True) -> None: - """Determine the state of the cover. + self._attr_supported_features = features - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. - Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - if is_lift_update: - target = self._target_lift_position - current = self.current_cover_position - else: - target = self._target_tilt_position - current = self.current_cover_tilt_position - - if position_or_tilt == 100: - self._state = STATE_CLOSED - return - if target is not None and target != current: - # we are mid transition and shouldn't update the state - return - self._state = STATE_OPEN - - async def async_added_to_hass(self) -> None: - """Run when the cover entity is about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cover_cluster_handler, SIGNAL_ATTR_UPDATED, self.zcl_attribute_updated - ) + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state + return { + "target_lift_position": state.get("target_lift_position"), + "target_tilt_position": state.get("target_tilt_position"), + } @property def is_closed(self) -> bool | None: - """Return True if the cover is closed. - - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. - Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - if self.current_cover_position is None: - return None - return self.current_cover_position == 0 + """Return True if the cover is closed.""" + return self.entity_data.entity.is_closed @property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state == STATE_OPENING + return self.entity_data.entity.is_opening @property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state == STATE_CLOSING + return self.entity_data.entity.is_closing @property def current_cover_position(self) -> int | None: - """Return the current position of ZHA cover. - - In HA None is unknown, 0 is closed, 100 is fully open. - In ZCL 0 is fully open, 100 is fully closed. 
- Keep in mind the values have already been flipped to match HA - in the WindowCovering cluster handler - """ - return self._cover_cluster_handler.current_position_lift_percentage + """Return the current position of ZHA cover.""" + return self.entity_data.entity.current_cover_position @property def current_cover_tilt_position(self) -> int | None: """Return the current tilt position of the cover.""" - return self._cover_cluster_handler.current_position_tilt_percentage - - @callback - def zcl_attribute_updated(self, attr_id, attr_name, value): - """Handle position update from cluster handler.""" - if attr_id in ( - WCAttrs.current_position_lift_percentage.id, - WCAttrs.current_position_tilt_percentage.id, - ): - value = ( - self.current_cover_position - if attr_id == WCAttrs.current_position_lift_percentage.id - else self.current_cover_tilt_position - ) - self._determine_state( - value, - is_lift_update=attr_id == WCAttrs.current_position_lift_percentage.id, - ) - self.async_write_ha_state() - - @callback - def async_update_state(self, state): - """Handle state update from HA operations below.""" - _LOGGER.debug("async_update_state=%s", state) - self._state = state - self.async_write_ha_state() + return self.entity_data.entity.current_cover_tilt_position + @convert_zha_error_to_ha_error async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" - res = await self._cover_cluster_handler.up_open() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover: {res[1]}") - self.async_update_state(STATE_OPENING) + await self.entity_data.entity.async_open_cover() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_open_cover_tilt(self, **kwargs: Any) -> None: """Open the cover tilt.""" - # 0 is open in ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage(0) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover tilt: {res[1]}") - self.async_update_state(STATE_OPENING) + await self.entity_data.entity.async_open_cover_tilt() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" - res = await self._cover_cluster_handler.down_close() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover: {res[1]}") - self.async_update_state(STATE_CLOSING) + await self.entity_data.entity.async_close_cover() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Close the cover tilt.""" - # 100 is closed in ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage(100) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover tilt: {res[1]}") - self.async_update_state(STATE_CLOSING) + await self.entity_data.entity.async_close_cover_tilt() + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - self._target_lift_position = kwargs[ATTR_POSITION] - assert self._target_lift_position is not None - assert self.current_cover_position is not None - # the 100 - value is because we need to invert the value before giving it to ZCL - res = await self._cover_cluster_handler.go_to_lift_percentage( - 100 - self._target_lift_position - ) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover position: {res[1]}") - self.async_update_state( - 
STATE_CLOSING - if self._target_lift_position < self.current_cover_position - else STATE_OPENING + await self.entity_data.entity.async_set_cover_position( + position=kwargs.get(ATTR_POSITION) ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" - self._target_tilt_position = kwargs[ATTR_TILT_POSITION] - assert self._target_tilt_position is not None - assert self.current_cover_tilt_position is not None - # the 100 - value is because we need to invert the value before giving it to ZCL - res = await self._cover_cluster_handler.go_to_tilt_percentage( - 100 - self._target_tilt_position + await self.entity_data.entity.async_set_cover_tilt_position( + tilt_position=kwargs.get(ATTR_TILT_POSITION) ) - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover tilt position: {res[1]}") - self.async_update_state( - STATE_CLOSING - if self._target_tilt_position < self.current_cover_tilt_position - else STATE_OPENING - ) - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - res = await self._cover_cluster_handler.stop() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - self._target_lift_position = self.current_cover_position - self._determine_state(self.current_cover_position) self.async_write_ha_state() + @convert_zha_error_to_ha_error + async def async_stop_cover(self, **kwargs: Any) -> None: + """Stop the cover.""" + await self.entity_data.entity.async_stop_cover() + self.async_write_ha_state() + + @convert_zha_error_to_ha_error async def async_stop_cover_tilt(self, **kwargs: Any) -> None: """Stop the cover tilt.""" - res = await self._cover_cluster_handler.stop() - if res[1] is not Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - self._target_tilt_position = self.current_cover_tilt_position - self._determine_state(self.current_cover_tilt_position, is_lift_update=False) - self.async_write_ha_state() - - -@MULTI_MATCH( - cluster_handler_names={ - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_SHADE, - } -) -class Shade(ZhaEntity, CoverEntity): - """ZHA Shade.""" - - _attr_device_class = CoverDeviceClass.SHADE - _attr_translation_key: str = "shade" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs, - ) -> None: - """Initialize the ZHA light.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - self._level_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_LEVEL] - self._position: int | None = None - self._is_open: bool | None = None - - @property - def current_cover_position(self) -> int | None: - """Return current position of cover. - - None is unknown, 0 is closed, 100 is fully open. 
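Every rewritten service method above is wrapped in convert_zha_error_to_ha_error from .helpers. That decorator is not part of this diff, but conceptually it surfaces zha library failures the same way the device_action.py change below does; a hedged sketch, not the actual implementation:

import functools

from zha.exceptions import ZHAException

from homeassistant.exceptions import HomeAssistantError


def convert_zha_error_to_ha_error(func):
    """Sketch: re-raise ZHAException from the wrapped coroutine as HomeAssistantError."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except ZHAException as err:
            raise HomeAssistantError(err) from err

    return wrapper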
- """ - return self._position - - @property - def is_closed(self) -> bool | None: - """Return True if shade is closed.""" - if self._is_open is None: - return None - return not self._is_open - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_set_open_closed, - ) - self.async_accept_signal( - self._level_cluster_handler, SIGNAL_SET_LEVEL, self.async_set_level - ) - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._is_open = last_state.state == STATE_OPEN - if ATTR_CURRENT_POSITION in last_state.attributes: - self._position = last_state.attributes[ATTR_CURRENT_POSITION] - - @callback - def async_set_open_closed(self, attr_id: int, attr_name: str, value: bool) -> None: - """Set open/closed state.""" - self._is_open = bool(value) + await self.entity_data.entity.async_stop_cover_tilt() self.async_write_ha_state() @callback - def async_set_level(self, value: int) -> None: - """Set the reported position.""" - value = max(0, min(255, value)) - self._position = int(value * 100 / 255) - self.async_write_ha_state() + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the window cover.""" - res = await self._on_off_cluster_handler.on() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to open cover: {res[1]}") + # Shades are a subtype of cover that do not need external state restored + if isinstance(self.entity_data.entity, ZhaShade): + return - self._is_open = True - self.async_write_ha_state() - - async def async_close_cover(self, **kwargs: Any) -> None: - """Close the window cover.""" - res = await self._on_off_cluster_handler.off() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to close cover: {res[1]}") - - self._is_open = False - self.async_write_ha_state() - - async def async_set_cover_position(self, **kwargs: Any) -> None: - """Move the roller shutter to a specific position.""" - new_pos = kwargs[ATTR_POSITION] - res = await self._level_cluster_handler.move_to_level_with_on_off( - new_pos * 255 / 100, 1 + # Same as `light`, some entity state is not derived from ZCL attributes + self.entity_data.entity.restore_external_state_attributes( + state=state.state, + target_lift_position=state.attributes.get("target_lift_position"), + target_tilt_position=state.attributes.get("target_tilt_position"), ) - - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to set cover position: {res[1]}") - - self._position = new_pos - self.async_write_ha_state() - - async def async_stop_cover(self, **kwargs: Any) -> None: - """Stop the cover.""" - res = await self._level_cluster_handler.stop() - if res[1] != Status.SUCCESS: - raise HomeAssistantError(f"Failed to stop cover: {res[1]}") - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_LEVEL, CLUSTER_HANDLER_ON_OFF}, - manufacturers="Keen Home Inc", -) -class KeenVent(Shade): - """Keen vent cover.""" - - _attr_device_class = CoverDeviceClass.DAMPER - _attr_translation_key: str = "keen_vent" - - async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover.""" - position = self._position or 100 - await asyncio.gather( - self._level_cluster_handler.move_to_level_with_on_off( - position * 255 / 100, 1 - ), - self._on_off_cluster_handler.on(), - ) - - 
self._is_open = True - self._position = position - self.async_write_ha_state() diff --git a/homeassistant/components/zha/device_action.py b/homeassistant/components/zha/device_action.py index a0f16d61f41..b4b40880734 100644 --- a/homeassistant/components/zha/device_action.py +++ b/homeassistant/components/zha/device_action.py @@ -5,20 +5,25 @@ from __future__ import annotations from typing import Any import voluptuous as vol +from zha.exceptions import ZHAException +from zha.zigbee.cluster_handlers.const import ( + CLUSTER_HANDLER_IAS_WD, + CLUSTER_HANDLER_INOVELLI, +) +from zha.zigbee.cluster_handlers.manufacturerspecific import ( + AllLEDEffectType, + SingleLEDEffectType, +) from homeassistant.components.device_automation import InvalidDeviceAutomationConfig from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_TYPE from homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv from homeassistant.helpers.typing import ConfigType, TemplateVarsType -from . import DOMAIN -from .core.cluster_handlers.manufacturerspecific import ( - AllLEDEffectType, - SingleLEDEffectType, -) -from .core.const import CLUSTER_HANDLER_IAS_WD, CLUSTER_HANDLER_INOVELLI -from .core.helpers import async_get_zha_device +from .const import DOMAIN +from .helpers import async_get_zha_device_proxy from .websocket_api import SERVICE_WARNING_DEVICE_SQUAWK, SERVICE_WARNING_DEVICE_WARN # mypy: disallow-any-generics @@ -144,7 +149,7 @@ async def async_get_actions( ) -> list[dict[str, str]]: """List device actions.""" try: - zha_device = async_get_zha_device(hass, device_id) + zha_device = async_get_zha_device_proxy(hass, device_id).device except (KeyError, AttributeError): return [] cluster_handlers = [ @@ -181,7 +186,7 @@ async def _execute_service_based_action( action_type = config[CONF_TYPE] service_name = SERVICE_NAMES[action_type] try: - zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) + zha_device = async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device except (KeyError, AttributeError): return @@ -201,7 +206,7 @@ async def _execute_cluster_handler_command_based_action( action_type = config[CONF_TYPE] cluster_handler_name = CLUSTER_HANDLER_MAPPINGS[action_type] try: - zha_device = async_get_zha_device(hass, config[CONF_DEVICE_ID]) + zha_device = async_get_zha_device_proxy(hass, config[CONF_DEVICE_ID]).device except (KeyError, AttributeError): return @@ -224,7 +229,10 @@ async def _execute_cluster_handler_command_based_action( f" {action_type}" ) - await getattr(action_cluster_handler, action_type)(**config) + try: + await getattr(action_cluster_handler, action_type)(**config) + except ZHAException as err: + raise HomeAssistantError(err) from err ZHA_ACTION_TYPES = { diff --git a/homeassistant/components/zha/device_tracker.py b/homeassistant/components/zha/device_tracker.py index 9c96fd0e346..247219777f4 100644 --- a/homeassistant/components/zha/device_tracker.py +++ b/homeassistant/components/zha/device_tracker.py @@ -3,28 +3,21 @@ from __future__ import annotations import functools -import time from homeassistant.components.device_tracker import ScannerEntity, SourceType from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import 
async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_POWER_CONFIGURATION, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -from .sensor import Battery - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.DEVICE_TRACKER) async def async_setup_entry( @@ -40,92 +33,48 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, + async_add_entities, + ZHADeviceScannerEntity, + entities_to_create, ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) -class ZHADeviceScannerEntity(ScannerEntity, ZhaEntity): +class ZHADeviceScannerEntity(ScannerEntity, ZHAEntity): """Represent a tracked device.""" _attr_should_poll = True # BaseZhaEntity defaults to False _attr_name: str = "Device scanner" - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Initialize the ZHA device tracker.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._battery_cluster_handler = self.cluster_handlers.get( - CLUSTER_HANDLER_POWER_CONFIGURATION - ) - self._connected = False - self._keepalive_interval = 60 - self._battery_level = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - if self._battery_cluster_handler: - self.async_accept_signal( - self._battery_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_battery_percentage_remaining_updated, - ) - - async def async_update(self) -> None: - """Handle polling.""" - if self.zha_device.last_seen is None: - self._connected = False - else: - difference = time.time() - self.zha_device.last_seen - if difference > self._keepalive_interval: - self._connected = False - else: - self._connected = True - @property - def is_connected(self): + def is_connected(self) -> bool: """Return true if the device is connected to the network.""" - return self._connected + return self.entity_data.entity.is_connected @property def source_type(self) -> SourceType: """Return the source type, eg gps or router, of the device.""" return SourceType.ROUTER - @callback - def async_battery_percentage_remaining_updated(self, attr_id, attr_name, value): - """Handle tracking.""" - if attr_name != "battery_percentage_remaining": - return - self.debug("battery_percentage_remaining updated: %s", value) - self._connected = True - self._battery_level = Battery.formatter(value) - self.async_write_ha_state() - @property - def battery_level(self): + def battery_level(self) -> int | None: """Return the battery level of the device. Percentage from 0-100. """ - return self._battery_level + return self.entity_data.entity.battery_level - @property # type: ignore[misc] - def device_info( - self, - ) -> DeviceInfo: + @property # type: ignore[explicit-override, misc] + def device_info(self) -> DeviceInfo: """Return device info.""" # We opt ZHA device tracker back into overriding this method because # it doesn't track IP-based devices. - # Call Super because ScannerEntity overrode it. 
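The old property.fget workaround for re-using the base class's overridden properties is replaced here by calling the property descriptor directly; a minimal, standalone illustration of why Parent.prop.__get__(self) reads the parent's property for the current instance:

class Base:
    @property
    def device_info(self) -> str:
        return "base-info"


class Child(Base):
    @property
    def device_info(self) -> str:
        # Reuse the parent's property via the descriptor protocol, mirroring the
        # ZHAEntity.device_info.__get__(self) call introduced in this diff.
        return Base.device_info.__get__(self) + "+child"


assert Child().device_info == "base-info+child"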
- # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 - return ZhaEntity.device_info.fget(self) # type: ignore[attr-defined] + return ZHAEntity.device_info.__get__(self) @property def unique_id(self) -> str: """Return unique ID.""" # Call Super because ScannerEntity overrode it. - # mypy doesn't know about fget: https://github.com/python/mypy/issues/6185 - return ZhaEntity.unique_id.fget(self) # type: ignore[attr-defined] + return ZHAEntity.unique_id.__get__(self) diff --git a/homeassistant/components/zha/device_trigger.py b/homeassistant/components/zha/device_trigger.py index a2ae734b8fc..a134d2aa59b 100644 --- a/homeassistant/components/zha/device_trigger.py +++ b/homeassistant/components/zha/device_trigger.py @@ -1,6 +1,7 @@ """Provides device automations for ZHA devices that emit events.""" import voluptuous as vol +from zha.application.const import ZHA_EVENT from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA from homeassistant.components.device_automation.exceptions import ( @@ -13,9 +14,8 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import DOMAIN as ZHA_DOMAIN -from .core.const import ZHA_EVENT -from .core.helpers import async_get_zha_device, get_zha_data +from .const import DOMAIN as ZHA_DOMAIN +from .helpers import async_get_zha_device_proxy, get_zha_data CONF_SUBTYPE = "subtype" DEVICE = "device" @@ -31,7 +31,7 @@ def _get_device_trigger_data(hass: HomeAssistant, device_id: str) -> tuple[str, # First, try checking to see if the device itself is accessible try: - zha_device = async_get_zha_device(hass, device_id) + zha_device = async_get_zha_device_proxy(hass, device_id).device except ValueError: pass else: diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index fff816777c0..bc4738d032a 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -6,6 +6,18 @@ import dataclasses from importlib.metadata import version from typing import Any +from zha.application.const import ( + ATTR_ATTRIBUTE_NAME, + ATTR_DEVICE_TYPE, + ATTR_IEEE, + ATTR_IN_CLUSTERS, + ATTR_OUT_CLUSTERS, + ATTR_PROFILE_ID, + ATTR_VALUE, + UNKNOWN, +) +from zha.application.gateway import Gateway +from zha.zigbee.device import Device from zigpy.config import CONF_NWK_EXTENDED_PAN_ID from zigpy.profiles import PROFILES from zigpy.types import Channels @@ -17,20 +29,13 @@ from homeassistant.const import CONF_ID, CONF_NAME, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .core.const import ( - ATTR_ATTRIBUTE_NAME, - ATTR_DEVICE_TYPE, - ATTR_IEEE, - ATTR_IN_CLUSTERS, - ATTR_OUT_CLUSTERS, - ATTR_PROFILE_ID, - ATTR_VALUE, - CONF_ALARM_MASTER_CODE, - UNKNOWN, +from .const import CONF_ALARM_MASTER_CODE +from .helpers import ( + ZHADeviceProxy, + async_get_zha_device_proxy, + get_zha_data, + get_zha_gateway, ) -from .core.device import ZHADevice -from .core.gateway import ZHAGateway -from .core.helpers import async_get_zha_device, get_zha_data, get_zha_gateway KEYS_TO_REDACT = { ATTR_IEEE, @@ -65,7 +70,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" zha_data = get_zha_data(hass) - gateway: ZHAGateway = get_zha_gateway(hass) + gateway: Gateway = get_zha_gateway(hass) app = 
gateway.application_controller energy_scan = await app.energy_scan( @@ -88,6 +93,7 @@ async def async_get_config_entry_diagnostics( "zigpy_znp": version("zigpy_znp"), "zigpy_zigate": version("zigpy-zigate"), "zhaquirks": version("zha-quirks"), + "zha": version("zha"), }, "devices": [ { @@ -106,13 +112,15 @@ async def async_get_device_diagnostics( hass: HomeAssistant, config_entry: ConfigEntry, device: dr.DeviceEntry ) -> dict[str, Any]: """Return diagnostics for a device.""" - zha_device: ZHADevice = async_get_zha_device(hass, device.id) - device_info: dict[str, Any] = zha_device.zha_device_info - device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data(zha_device) + zha_device_proxy: ZHADeviceProxy = async_get_zha_device_proxy(hass, device.id) + device_info: dict[str, Any] = zha_device_proxy.zha_device_info + device_info[CLUSTER_DETAILS] = get_endpoint_cluster_attr_data( + zha_device_proxy.device + ) return async_redact_data(device_info, KEYS_TO_REDACT) -def get_endpoint_cluster_attr_data(zha_device: ZHADevice) -> dict: +def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: """Return endpoint cluster attribute data.""" cluster_details = {} for ep_id, endpoint in zha_device.device.endpoints.items(): diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index f10e377dc46..6db0ffad964 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -6,84 +6,70 @@ import asyncio from collections.abc import Callable import functools import logging -from typing import TYPE_CHECKING, Any, Self +from typing import Any -from zigpy.quirks.v2 import EntityMetadata, EntityType +from zha.mixins import LogMixin -from homeassistant.const import ATTR_NAME, EntityCategory -from homeassistant.core import CALLBACK_TYPE, Event, EventStateChangedData, callback -from homeassistant.helpers import entity -from homeassistant.helpers.debounce import Debouncer +from homeassistant.const import ATTR_MANUFACTURER, ATTR_MODEL, ATTR_NAME, EntityCategory +from homeassistant.core import State, callback from homeassistant.helpers.device_registry import CONNECTION_ZIGBEE, DeviceInfo -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) -from homeassistant.helpers.event import async_track_state_change_event +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity from homeassistant.helpers.restore_state import RestoreEntity -from .core.const import ( - ATTR_MANUFACTURER, - ATTR_MODEL, - DOMAIN, - SIGNAL_GROUP_ENTITY_REMOVED, - SIGNAL_GROUP_MEMBERSHIP_CHANGE, - SIGNAL_REMOVE, -) -from .core.helpers import LogMixin, get_zha_gateway - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice +from .const import DOMAIN +from .helpers import SIGNAL_REMOVE_ENTITIES, EntityData, convert_zha_error_to_ha_error _LOGGER = logging.getLogger(__name__) -ENTITY_SUFFIX = "entity_suffix" -DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY = 0.5 - -class BaseZhaEntity(LogMixin, entity.Entity): - """A base class for ZHA entities.""" - - _unique_id_suffix: str | None = None - """suffix to add to the unique_id of the entity. 
Used for multi - entities using the same cluster handler/cluster id for the entity.""" +class ZHAEntity(LogMixin, RestoreEntity, Entity): + """ZHA eitity.""" _attr_has_entity_name = True _attr_should_poll = False + remove_future: asyncio.Future[Any] - def __init__(self, unique_id: str, zha_device: ZHADevice, **kwargs: Any) -> None: + def __init__(self, entity_data: EntityData, *args, **kwargs) -> None: """Init ZHA entity.""" - self._unique_id: str = unique_id - if self._unique_id_suffix: - self._unique_id += f"-{self._unique_id_suffix}" - self._state: Any = None - self._extra_state_attributes: dict[str, Any] = {} - self._zha_device = zha_device + super().__init__(*args, **kwargs) + self.entity_data: EntityData = entity_data self._unsubs: list[Callable[[], None]] = [] - @property - def unique_id(self) -> str: - """Return a unique ID.""" - return self._unique_id + if self.entity_data.entity.icon is not None: + # Only custom quirks will realistically set an icon + self._attr_icon = self.entity_data.entity.icon + + meta = self.entity_data.entity.info_object + self._attr_unique_id = meta.unique_id + + if meta.translation_key is not None: + self._attr_translation_key = meta.translation_key + elif meta.fallback_name is not None: + # Only custom quirks will create entities with just a fallback name! + # + # This is to allow local development and to register niche devices, since + # their translation_key will probably never be added to `zha/strings.json`. + self._attr_name = meta.fallback_name + + if meta.entity_category is not None: + self._attr_entity_category = EntityCategory(meta.entity_category) + + self._attr_entity_registry_enabled_default = ( + meta.entity_registry_enabled_default + ) @property - def zha_device(self) -> ZHADevice: - """Return the ZHA device this entity is attached to.""" - return self._zha_device - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device specific state attributes.""" - return self._extra_state_attributes + def available(self) -> bool: + """Return entity availability.""" + return self.entity_data.device_proxy.device.available @property def device_info(self) -> DeviceInfo: """Return a device description for device registry.""" - zha_device_info = self._zha_device.device_info + zha_device_info = self.entity_data.device_proxy.device_info ieee = zha_device_info["ieee"] - - zha_gateway = get_zha_gateway(self.hass) + zha_gateway = self.entity_data.device_proxy.gateway_proxy.gateway return DeviceInfo( connections={(CONNECTION_ZIGBEE, ieee)}, @@ -95,265 +81,67 @@ class BaseZhaEntity(LogMixin, entity.Entity): ) @callback - def async_state_changed(self) -> None: + def _handle_entity_events(self, event: Any) -> None: """Entity state changed.""" + self.debug("Handling event from entity: %s", event) self.async_write_ha_state() - @callback - def async_update_state_attribute(self, key: str, value: Any) -> None: - """Update a single device state attribute.""" - self._extra_state_attributes.update({key: value}) - self.async_write_ha_state() + async def async_added_to_hass(self) -> None: + """Run when about to be added to hass.""" + self.remove_future = self.hass.loop.create_future() + self._unsubs.append( + self.entity_data.entity.on_all_events(self._handle_entity_events) + ) + remove_signal = ( + f"{SIGNAL_REMOVE_ENTITIES}_group_{self.entity_data.group_proxy.group.group_id}" + if self.entity_data.is_group_entity + and self.entity_data.group_proxy is not None + else f"{SIGNAL_REMOVE_ENTITIES}_{self.entity_data.device_proxy.device.ieee}" + ) + 
self._unsubs.append( + async_dispatcher_connect( + self.hass, + remove_signal, + functools.partial(self.async_remove, force_remove=True), + ) + ) + self.entity_data.device_proxy.gateway_proxy.register_entity_reference( + self.entity_id, + self.entity_data, + self.device_info, + self.remove_future, + ) + + if (state := await self.async_get_last_state()) is None: + return + + self.restore_external_state_attributes(state) @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: - """Set the entity state.""" + def restore_external_state_attributes(self, state: State) -> None: + """Restore ephemeral external state from Home Assistant back into ZHA.""" + + # Some operations rely on extra state that is not maintained in the ZCL + # attribute cache. Until ZHA is able to maintain its own persistent state (or + # provides a more generic hook to utilize HA to do this), we directly restore + # them. async def async_will_remove_from_hass(self) -> None: """Disconnect entity object when removed.""" for unsub in self._unsubs[:]: unsub() self._unsubs.remove(unsub) + await super().async_will_remove_from_hass() + self.remove_future.set_result(True) - @callback - def async_accept_signal( - self, - cluster_handler: ClusterHandler | None, - signal: str, - func: Callable[..., Any], - signal_override=False, - ): - """Accept a signal from a cluster handler.""" - unsub = None - if signal_override: - unsub = async_dispatcher_connect(self.hass, signal, func) - else: - assert cluster_handler - unsub = async_dispatcher_connect( - self.hass, f"{cluster_handler.unique_id}_{signal}", func - ) - self._unsubs.append(unsub) + @convert_zha_error_to_ha_error + async def async_update(self) -> None: + """Update the entity.""" + await self.entity_data.entity.async_update() + self.async_write_ha_state() def log(self, level: int, msg: str, *args, **kwargs): """Log a message.""" msg = f"%s: {msg}" args = (self.entity_id, *args) _LOGGER.log(level, msg, *args, **kwargs) - - -class ZhaEntity(BaseZhaEntity, RestoreEntity): - """A base class for non group ZHA entities.""" - - remove_future: asyncio.Future[Any] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init ZHA entity.""" - super().__init__(unique_id, zha_device, **kwargs) - - self.cluster_handlers: dict[str, ClusterHandler] = {} - for cluster_handler in cluster_handlers: - self.cluster_handlers[cluster_handler.name] = cluster_handler - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
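The new `ZHAEntity` base is deliberately thin: all state lives in the zha-lib entity held by `EntityData`, Home Assistant is only told to re-read it via `async_write_ha_state()` when the library emits an event, and commands are forwarded and wrapped with `convert_zha_error_to_ha_error` so a `ZHAException` surfaces as a `HomeAssistantError`. A minimal, hypothetical platform entity built on this base could look like the sketch below; the switch platform and its `is_on`/turn methods are stand-ins for illustration, not part of this change.

```python
# Hypothetical sketch of a platform entity using the new delegation pattern.
# `entity_data.entity` is the zha-lib entity; the Home Assistant class only
# translates attributes and forwards commands.
from typing import Any

from homeassistant.components.switch import SwitchEntity

from .entity import ZHAEntity
from .helpers import convert_zha_error_to_ha_error


class SketchZHASwitch(SwitchEntity, ZHAEntity):
    """Illustrative switch wrapper around a zha-lib switch entity."""

    @property
    def is_on(self) -> bool:
        """Read through to the library entity (assumes it exposes `is_on`)."""
        return self.entity_data.entity.is_on

    @convert_zha_error_to_ha_error
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Forward the command, then mirror the library state into HA."""
        await self.entity_data.entity.async_turn_on()
        self.async_write_ha_state()

    @convert_zha_error_to_ha_error
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Forward the command, then mirror the library state into HA."""
        await self.entity_data.entity.async_turn_off()
        self.async_write_ha_state()
```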
- - Return entity if it is a supported configuration, otherwise return None - """ - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: EntityMetadata) -> None: - """Init this entity from the quirks metadata.""" - if entity_metadata.initially_disabled: - self._attr_entity_registry_enabled_default = False - - has_device_class = hasattr(entity_metadata, "device_class") - has_attribute_name = hasattr(entity_metadata, "attribute_name") - has_command_name = hasattr(entity_metadata, "command_name") - if not has_device_class or ( - has_device_class and entity_metadata.device_class is None - ): - if entity_metadata.translation_key: - self._attr_translation_key = entity_metadata.translation_key - elif has_attribute_name: - self._attr_translation_key = entity_metadata.attribute_name - elif has_command_name: - self._attr_translation_key = entity_metadata.command_name - if has_attribute_name: - self._unique_id_suffix = entity_metadata.attribute_name - elif has_command_name: - self._unique_id_suffix = entity_metadata.command_name - if entity_metadata.entity_type is EntityType.CONFIG: - self._attr_entity_category = EntityCategory.CONFIG - elif entity_metadata.entity_type is EntityType.DIAGNOSTIC: - self._attr_entity_category = EntityCategory.DIAGNOSTIC - else: - self._attr_entity_category = None - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._zha_device.available - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - self.remove_future = self.hass.loop.create_future() - self.async_accept_signal( - None, - f"{SIGNAL_REMOVE}_{self.zha_device.ieee}", - functools.partial(self.async_remove, force_remove=True), - signal_override=True, - ) - - if last_state := await self.async_get_last_state(): - self.async_restore_last_state(last_state) - - self.async_accept_signal( - None, - f"{self.zha_device.available_signal}_entity", - self.async_state_changed, - signal_override=True, - ) - self._zha_device.gateway.register_entity_reference( - self._zha_device.ieee, - self.entity_id, - self._zha_device, - self.cluster_handlers, - self.device_info, - self.remove_future, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - await super().async_will_remove_from_hass() - self.zha_device.gateway.remove_entity_reference(self) - self.remove_future.set_result(True) - - @callback - def async_restore_last_state(self, last_state) -> None: - """Restore previous state.""" - - async def async_update(self) -> None: - """Retrieve latest state.""" - tasks = [ - cluster_handler.async_update() - for cluster_handler in self.cluster_handlers.values() - if hasattr(cluster_handler, "async_update") - ] - if tasks: - await asyncio.gather(*tasks) - - -class ZhaGroupEntity(BaseZhaEntity): - """A base class for ZHA group entities.""" - - # The group name is set in the initializer - _attr_name: str - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a ZHA group.""" - super().__init__(unique_id, zha_device, **kwargs) - self._available = False - self._group = zha_device.gateway.groups.get(group_id) - self._group_id: int = group_id - self._entity_ids: list[str] = entity_ids - self._async_unsub_state_changed: CALLBACK_TYPE | None = None - self._handled_group_membership = False - self._change_listener_debouncer: Debouncer | None = None - 
self._update_group_from_child_delay = DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY - - self._attr_name = self._group.name - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._available - - @classmethod - def create_entity( - cls, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> Self | None: - """Group Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - return cls(entity_ids, unique_id, group_id, zha_device, **kwargs) - - async def _handle_group_membership_changed(self): - """Handle group membership changed.""" - # Make sure we don't call remove twice as members are removed - if self._handled_group_membership: - return - - self._handled_group_membership = True - await self.async_remove(force_remove=True) - if len(self._group.members) >= 2: - async_dispatcher_send( - self.hass, SIGNAL_GROUP_ENTITY_REMOVED, self._group_id - ) - - async def async_added_to_hass(self) -> None: - """Register callbacks.""" - await super().async_added_to_hass() - await self.async_update() - - self.async_accept_signal( - None, - f"{SIGNAL_GROUP_MEMBERSHIP_CHANGE}_0x{self._group_id:04x}", - self._handle_group_membership_changed, - signal_override=True, - ) - - if self._change_listener_debouncer is None: - self._change_listener_debouncer = Debouncer( - self.hass, - _LOGGER, - cooldown=self._update_group_from_child_delay, - immediate=False, - function=functools.partial(self.async_update_ha_state, True), - ) - self.async_on_remove(self._change_listener_debouncer.async_cancel) - self._async_unsub_state_changed = async_track_state_change_event( - self.hass, self._entity_ids, self.async_state_changed_listener - ) - - @callback - def async_state_changed_listener(self, event: Event[EventStateChangedData]) -> None: - """Handle child updates.""" - # Delay to ensure that we get updates from all members before updating the group - assert self._change_listener_debouncer - self._change_listener_debouncer.async_schedule_call() - - async def async_will_remove_from_hass(self) -> None: - """Handle removal from Home Assistant.""" - await super().async_will_remove_from_hass() - if self._async_unsub_state_changed is not None: - self._async_unsub_state_changed() - self._async_unsub_state_changed = None - - async def async_update(self) -> None: - """Update the state of the group entity.""" diff --git a/homeassistant/components/zha/fan.py b/homeassistant/components/zha/fan.py index 3677befb76e..767c0d4cfb7 100644 --- a/homeassistant/components/zha/fan.py +++ b/homeassistant/components/zha/fan.py @@ -2,54 +2,26 @@ from __future__ import annotations -from abc import abstractmethod import functools -import math from typing import Any -from zigpy.zcl.clusters import hvac +from zha.application.platforms.fan.const import FanEntityFeature as ZHAFanEntityFeature -from homeassistant.components.fan import ( - ATTR_PERCENTAGE, - ATTR_PRESET_MODE, - FanEntity, - FanEntityFeature, -) +from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.util.percentage import ( - 
percentage_to_ranged_value, - ranged_value_to_percentage, + +from .entity import ZHAEntity +from .helpers import ( + SIGNAL_ADD_ENTITIES, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from homeassistant.util.scaling import int_states_in_range - -from .core import discovery -from .core.cluster_handlers import wrap_zigpy_exceptions -from .core.const import CLUSTER_HANDLER_FAN, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity - -# Additional speeds in zigbee's ZCL -# Spec is unclear as to what this value means. On King Of Fans HBUniversal -# receiver, this means Very High. -PRESET_MODE_ON = "on" -# The fan speed is self-regulated -PRESET_MODE_AUTO = "auto" -# When the heated/cooled space is occupied, the fan is always on -PRESET_MODE_SMART = "smart" - -SPEED_RANGE = (1, 3) # off is not included -PRESET_MODES_TO_NAME = {4: PRESET_MODE_ON, 5: PRESET_MODE_AUTO, 6: PRESET_MODE_SMART} - -DEFAULT_ON_PERCENTAGE = 50 - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.FAN) -GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.FAN) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.FAN) async def async_setup_entry( @@ -65,50 +37,65 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZhaFan, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BaseFan(FanEntity): - """Base representation of a ZHA fan.""" +class ZhaFan(FanEntity, ZHAEntity): + """Representation of a ZHA fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED _attr_translation_key: str = "fan" + _enable_turn_on_off_backwards_compatibility = False + + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA fan.""" + super().__init__(entity_data) + features = FanEntityFeature(0) + zha_features: ZHAFanEntityFeature = self.entity_data.entity.supported_features + + if ZHAFanEntityFeature.DIRECTION in zha_features: + features |= FanEntityFeature.DIRECTION + if ZHAFanEntityFeature.OSCILLATE in zha_features: + features |= FanEntityFeature.OSCILLATE + if ZHAFanEntityFeature.PRESET_MODE in zha_features: + features |= FanEntityFeature.PRESET_MODE + if ZHAFanEntityFeature.SET_SPEED in zha_features: + features |= FanEntityFeature.SET_SPEED + if ZHAFanEntityFeature.TURN_ON in zha_features: + features |= FanEntityFeature.TURN_ON + if ZHAFanEntityFeature.TURN_OFF in zha_features: + features |= FanEntityFeature.TURN_OFF + + self._attr_supported_features = features + + @property + def preset_mode(self) -> str | None: + """Return the current preset mode.""" + return self.entity_data.entity.preset_mode @property def preset_modes(self) -> list[str]: """Return the available preset modes.""" - return list(self.preset_modes_to_name.values()) - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return PRESET_MODES_TO_NAME - - @property - def preset_name_to_mode(self) -> dict[str, int]: - """Return a dict from preset name to mode.""" - return {v: k for k, v in self.preset_modes_to_name.items()} + return self.entity_data.entity.preset_modes @property def default_on_percentage(self) -> int: """Return the default on percentage.""" - return DEFAULT_ON_PERCENTAGE + return 
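Every platform module follows the same discovery contract after this refactor: `async_setup_entry` picks up the pending `EntityData` list for its platform from the shared ZHA data and connects `SIGNAL_ADD_ENTITIES` to the shared `async_add_entities` helper together with its entity class, exactly as the fan hunk does. A condensed sketch of that contract, reusing the hypothetical switch class from the earlier sketch:

```python
# Condensed sketch of the per-platform setup contract used throughout this
# refactor. Platform.SWITCH and SketchZHASwitch (from the earlier sketch) are
# placeholders; each real platform substitutes its own enum member and class.
import functools

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .helpers import (
    SIGNAL_ADD_ENTITIES,
    async_add_entities as zha_async_add_entities,
    get_zha_data,
)


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up a ZHA platform from its queued EntityData records."""
    zha_data = get_zha_data(hass)
    entities_to_create = zha_data.platforms[Platform.SWITCH]

    # Devices that join later re-fire SIGNAL_ADD_ENTITIES, draining the list again.
    unsub = async_dispatcher_connect(
        hass,
        SIGNAL_ADD_ENTITIES,
        functools.partial(
            zha_async_add_entities, async_add_entities, SketchZHASwitch, entities_to_create
        ),
    )
    config_entry.async_on_unload(unsub)
```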
self.entity_data.entity.default_on_percentage @property def speed_range(self) -> tuple[int, int]: """Return the range of speeds the fan supports. Off is not included.""" - return SPEED_RANGE + return self.entity_data.entity.speed_range @property def speed_count(self) -> int: """Return the number of speeds the fan supports.""" - return int_states_in_range(self.speed_range) + return self.entity_data.entity.speed_count + @convert_zha_error_to_ha_error async def async_turn_on( self, percentage: int | None = None, @@ -116,201 +103,30 @@ class BaseFan(FanEntity): **kwargs: Any, ) -> None: """Turn the entity on.""" - if percentage is None: - percentage = self.default_on_percentage - await self.async_set_percentage(percentage) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self.async_set_percentage(0) - - async def async_set_percentage(self, percentage: int) -> None: - """Set the speed percentage of the fan.""" - fan_mode = math.ceil(percentage_to_ranged_value(self.speed_range, percentage)) - await self._async_set_fan_mode(fan_mode) - - async def async_set_preset_mode(self, preset_mode: str) -> None: - """Set the preset mode for the fan.""" - await self._async_set_fan_mode(self.preset_name_to_mode[preset_mode]) - - @abstractmethod - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the fan.""" - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" - - -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_FAN) -class ZhaFan(BaseFan, ZhaEntity): - """Representation of a ZHA fan.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._fan_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_FAN) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._fan_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state + await self.entity_data.entity.async_turn_on( + percentage=percentage, preset_mode=preset_mode ) - - @property - def percentage(self) -> int | None: - """Return the current speed percentage.""" - if ( - self._fan_cluster_handler.fan_mode is None - or self._fan_cluster_handler.fan_mode > self.speed_range[1] - ): - return None - if self._fan_cluster_handler.fan_mode == 0: - return 0 - return ranged_value_to_percentage( - self.speed_range, self._fan_cluster_handler.fan_mode - ) - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode.""" - return self.preset_modes_to_name.get(self._fan_cluster_handler.fan_mode) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" self.async_write_ha_state() - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the fan.""" - await self._fan_cluster_handler.async_set_speed(fan_mode) - self.async_set_state(0, "fan_mode", fan_mode) + @convert_zha_error_to_ha_error + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + await self.entity_data.entity.async_turn_off() + self.async_write_ha_state() + @convert_zha_error_to_ha_error + async def async_set_percentage(self, percentage: int) -> None: + """Set the speed percentage of the fan.""" + await self.entity_data.entity.async_set_percentage(percentage=percentage) + 
self.async_write_ha_state() -@GROUP_MATCH() -class FanGroup(BaseFan, ZhaGroupEntity): - """Representation of a fan group.""" - - _attr_translation_key: str = "fan_group" - - def __init__( - self, entity_ids: list[str], unique_id: str, group_id: int, zha_device, **kwargs - ) -> None: - """Initialize a fan group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - self._available: bool = False - group = self.zha_device.gateway.get_group(self._group_id) - self._fan_cluster_handler = group.endpoint[hvac.Fan.cluster_id] - self._percentage = None - self._preset_mode = None + @convert_zha_error_to_ha_error + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set the preset mode for the fan.""" + await self.entity_data.entity.async_set_preset_mode(preset_mode=preset_mode) + self.async_write_ha_state() @property def percentage(self) -> int | None: """Return the current speed percentage.""" - return self._percentage - - @property - def preset_mode(self) -> str | None: - """Return the current preset mode.""" - return self._preset_mode - - async def _async_set_fan_mode(self, fan_mode: int) -> None: - """Set the fan mode for the group.""" - - with wrap_zigpy_exceptions(): - await self._fan_cluster_handler.write_attributes({"fan_mode": fan_mode}) - - self.async_set_state(0, "fan_mode", fan_mode) - - async def async_update(self) -> None: - """Attempt to retrieve on off state from the fan.""" - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - percentage_states: list[State] = [ - state for state in states if state.attributes.get(ATTR_PERCENTAGE) - ] - preset_mode_states: list[State] = [ - state for state in states if state.attributes.get(ATTR_PRESET_MODE) - ] - self._available = any(state.state != STATE_UNAVAILABLE for state in states) - - if percentage_states: - self._percentage = percentage_states[0].attributes[ATTR_PERCENTAGE] - self._preset_mode = None - elif preset_mode_states: - self._preset_mode = preset_mode_states[0].attributes[ATTR_PRESET_MODE] - self._percentage = None - else: - self._percentage = None - self._preset_mode = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await self.async_update() - await super().async_added_to_hass() - - -IKEA_SPEED_RANGE = (1, 10) # off is not included -IKEA_PRESET_MODES_TO_NAME = { - 1: PRESET_MODE_AUTO, - 2: "Speed 1", - 3: "Speed 1.5", - 4: "Speed 2", - 5: "Speed 2.5", - 6: "Speed 3", - 7: "Speed 3.5", - 8: "Speed 4", - 9: "Speed 4.5", - 10: "Speed 5", -} - - -@MULTI_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class IkeaFan(ZhaFan): - """Representation of an Ikea fan.""" - - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._fan_cluster_handler = self.cluster_handlers.get("ikea_airpurifier") - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return IKEA_PRESET_MODES_TO_NAME - - @property - def speed_range(self) -> tuple[int, int]: - """Return the range of speeds the fan supports. 
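`ZhaFan` no longer computes speed ranges or preset tables itself; it only translates the library's feature flags and properties into their Home Assistant counterparts. The `if` chain in `__init__` is a one-to-one mapping, which a table-driven variant makes explicit; the sketch below is behaviorally equivalent and shown only for illustration, it is not the code used by this change.

```python
# Alternative, table-driven sketch of the flag translation done in ZhaFan.__init__.
from homeassistant.components.fan import FanEntityFeature
from zha.application.platforms.fan.const import FanEntityFeature as ZHAFanEntityFeature

_ZHA_TO_HA_FAN_FEATURE = {
    ZHAFanEntityFeature.DIRECTION: FanEntityFeature.DIRECTION,
    ZHAFanEntityFeature.OSCILLATE: FanEntityFeature.OSCILLATE,
    ZHAFanEntityFeature.PRESET_MODE: FanEntityFeature.PRESET_MODE,
    ZHAFanEntityFeature.SET_SPEED: FanEntityFeature.SET_SPEED,
    ZHAFanEntityFeature.TURN_ON: FanEntityFeature.TURN_ON,
    ZHAFanEntityFeature.TURN_OFF: FanEntityFeature.TURN_OFF,
}


def translate_fan_features(zha_features: ZHAFanEntityFeature) -> FanEntityFeature:
    """Map zha-lib fan feature flags onto Home Assistant fan feature flags."""
    features = FanEntityFeature(0)
    for zha_flag, ha_flag in _ZHA_TO_HA_FAN_FEATURE.items():
        if zha_flag in zha_features:
            features |= ha_flag
    return features
```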
Off is not included.""" - return IKEA_SPEED_RANGE - - @property - def default_on_percentage(self) -> int: - """Return the default on percentage.""" - return int( - (100 / self.speed_count) * self.preset_name_to_mode[PRESET_MODE_AUTO] - ) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_FAN, - models={"HBUniversalCFRemote", "HDC52EastwindFan"}, -) -class KofFan(ZhaFan): - """Representation of a fan made by King Of Fans.""" - - _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE - - @property - def speed_range(self) -> tuple[int, int]: - """Return the range of speeds the fan supports. Off is not included.""" - return (1, 4) - - @property - def preset_modes_to_name(self) -> dict[int, str]: - """Return a dict from preset mode to name.""" - return {6: PRESET_MODE_SMART} + return self.entity_data.entity.percentage diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py new file mode 100644 index 00000000000..0691e2429d1 --- /dev/null +++ b/homeassistant/components/zha/helpers.py @@ -0,0 +1,1296 @@ +"""Helper functions for the ZHA integration.""" + +from __future__ import annotations + +import asyncio +import collections +from collections.abc import Awaitable, Callable, Coroutine, Mapping +import copy +import dataclasses +import enum +import functools +import itertools +import logging +import re +import time +from types import MappingProxyType +from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast + +import voluptuous as vol +from zha.application.const import ( + ATTR_CLUSTER_ID, + ATTR_DEVICE_IEEE, + ATTR_TYPE, + ATTR_UNIQUE_ID, + CLUSTER_TYPE_IN, + CLUSTER_TYPE_OUT, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + UNKNOWN_MANUFACTURER, + UNKNOWN_MODEL, + ZHA_CLUSTER_HANDLER_CFG_DONE, + ZHA_CLUSTER_HANDLER_MSG, + ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + ZHA_CLUSTER_HANDLER_MSG_DATA, + ZHA_EVENT, + ZHA_GW_MSG, + ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO, + ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_GROUP_ADDED, + ZHA_GW_MSG_GROUP_INFO, + ZHA_GW_MSG_GROUP_MEMBER_ADDED, + ZHA_GW_MSG_GROUP_MEMBER_REMOVED, + ZHA_GW_MSG_GROUP_REMOVED, + ZHA_GW_MSG_RAW_INIT, + RadioType, +) +from zha.application.gateway import ( + ConnectionLostEvent, + DeviceFullInitEvent, + DeviceJoinedEvent, + DeviceLeftEvent, + DeviceRemovedEvent, + Gateway, + GroupEvent, + RawDeviceInitializedEvent, +) +from zha.application.helpers import ( + AlarmControlPanelOptions, + CoordinatorConfiguration, + DeviceOptions, + DeviceOverridesConfiguration, + LightOptions, + QuirksConfiguration, + ZHAConfiguration, + ZHAData, +) +from zha.application.platforms import GroupEntity, PlatformEntity +from zha.event import EventBase +from zha.exceptions import ZHAException +from zha.mixins import LogMixin +from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent +from zha.zigbee.device import ClusterHandlerConfigurationComplete, Device, ZHAEvent +from zha.zigbee.group import Group, GroupInfo, GroupMember +from zigpy.config import ( + CONF_DATABASE, + CONF_DEVICE, + CONF_DEVICE_PATH, + CONF_NWK, + CONF_NWK_CHANNEL, +) +import zigpy.exceptions +from zigpy.profiles import PROFILES +import zigpy.types +from zigpy.types import EUI64 +import zigpy.util +import zigpy.zcl +from zigpy.zcl.foundation import CommandSchema + +from homeassistant import __path__ as HOMEASSISTANT_PATH +from 
homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + is_multiprotocol_url, +) +from homeassistant.components.system_log import LogEntry +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_AREA_ID, + ATTR_DEVICE_ID, + ATTR_ENTITY_ID, + ATTR_MODEL, + ATTR_NAME, + Platform, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType + +from .const import ( + ATTR_ACTIVE_COORDINATOR, + ATTR_ATTRIBUTES, + ATTR_AVAILABLE, + ATTR_CLUSTER_NAME, + ATTR_DEVICE_TYPE, + ATTR_ENDPOINT_NAMES, + ATTR_IEEE, + ATTR_LAST_SEEN, + ATTR_LQI, + ATTR_MANUFACTURER, + ATTR_MANUFACTURER_CODE, + ATTR_NEIGHBORS, + ATTR_NWK, + ATTR_POWER_SOURCE, + ATTR_QUIRK_APPLIED, + ATTR_QUIRK_CLASS, + ATTR_QUIRK_ID, + ATTR_ROUTES, + ATTR_RSSI, + ATTR_SIGNATURE, + ATTR_SUCCESS, + CONF_ALARM_ARM_REQUIRES_CODE, + CONF_ALARM_FAILED_TRIES, + CONF_ALARM_MASTER_CODE, + CONF_ALWAYS_PREFER_XY_COLOR_MODE, + CONF_BAUDRATE, + CONF_CONSIDER_UNAVAILABLE_BATTERY, + CONF_CONSIDER_UNAVAILABLE_MAINS, + CONF_CUSTOM_QUIRKS_PATH, + CONF_DEFAULT_LIGHT_TRANSITION, + CONF_DEVICE_CONFIG, + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, + CONF_ENABLE_IDENTIFY_ON_JOIN, + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, + CONF_ENABLE_QUIRKS, + CONF_FLOW_CONTROL, + CONF_GROUP_MEMBERS_ASSUME_STATE, + CONF_RADIO_TYPE, + CONF_ZIGPY, + CUSTOM_CONFIGURATION, + DATA_ZHA, + DEFAULT_DATABASE_NAME, + DEVICE_PAIRING_STATUS, + DOMAIN, + ZHA_ALARM_OPTIONS, + ZHA_OPTIONS, +) + +if TYPE_CHECKING: + from logging import Filter, LogRecord + + from .entity import ZHAEntity + from .update import ZHAFirmwareUpdateCoordinator + + _LogFilterType = Filter | Callable[[LogRecord], bool] + +_P = ParamSpec("_P") +_EntityT = TypeVar("_EntityT", bound="ZHAEntity") + +_LOGGER = logging.getLogger(__name__) + +DEBUG_COMP_BELLOWS = "bellows" +DEBUG_COMP_ZHA = "homeassistant.components.zha" +DEBUG_LIB_ZHA = "zha" +DEBUG_COMP_ZIGPY = "zigpy" +DEBUG_COMP_ZIGPY_ZNP = "zigpy_znp" +DEBUG_COMP_ZIGPY_DECONZ = "zigpy_deconz" +DEBUG_COMP_ZIGPY_XBEE = "zigpy_xbee" +DEBUG_COMP_ZIGPY_ZIGATE = "zigpy_zigate" +DEBUG_LEVEL_CURRENT = "current" +DEBUG_LEVEL_ORIGINAL = "original" +DEBUG_LEVELS = { + DEBUG_COMP_BELLOWS: logging.DEBUG, + DEBUG_COMP_ZHA: logging.DEBUG, + DEBUG_COMP_ZIGPY: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZNP: logging.DEBUG, + DEBUG_COMP_ZIGPY_DECONZ: logging.DEBUG, + DEBUG_COMP_ZIGPY_XBEE: logging.DEBUG, + DEBUG_COMP_ZIGPY_ZIGATE: logging.DEBUG, + DEBUG_LIB_ZHA: logging.DEBUG, +} +DEBUG_RELAY_LOGGERS = [DEBUG_COMP_ZHA, DEBUG_COMP_ZIGPY, DEBUG_LIB_ZHA] +ZHA_GW_MSG_LOG_ENTRY = "log_entry" +ZHA_GW_MSG_LOG_OUTPUT = "log_output" +SIGNAL_REMOVE_ENTITIES = "zha_remove_entities" +GROUP_ENTITY_DOMAINS = [Platform.LIGHT, Platform.SWITCH, Platform.FAN] +SIGNAL_ADD_ENTITIES = "zha_add_entities" +ENTITIES = "entities" + +RX_ON_WHEN_IDLE = "rx_on_when_idle" +RELATIONSHIP = "relationship" +EXTENDED_PAN_ID = "extended_pan_id" +PERMIT_JOINING = "permit_joining" +DEPTH = "depth" + +DEST_NWK = "dest_nwk" +ROUTE_STATUS = "route_status" +MEMORY_CONSTRAINED = "memory_constrained" +MANY_TO_ONE = "many_to_one" +ROUTE_RECORD_REQUIRED = "route_record_required" +NEXT_HOP = "next_hop" + +USER_GIVEN_NAME = 
"user_given_name" +DEVICE_REG_ID = "device_reg_id" + + +class GroupEntityReference(NamedTuple): + """Reference to a group entity.""" + + name: str | None + original_name: str | None + entity_id: str + + +class ZHAGroupProxy(LogMixin): + """Proxy class to interact with the ZHA group instances.""" + + def __init__(self, group: Group, gateway_proxy: ZHAGatewayProxy) -> None: + """Initialize the gateway proxy.""" + self.group: Group = group + self.gateway_proxy: ZHAGatewayProxy = gateway_proxy + + @property + def group_info(self) -> dict[str, Any]: + """Return a group description for group.""" + return { + "name": self.group.name, + "group_id": self.group.group_id, + "members": [ + { + "endpoint_id": member.endpoint_id, + "device": self.gateway_proxy.device_proxies[ + member.device.ieee + ].zha_device_info, + "entities": [e._asdict() for e in self.associated_entities(member)], + } + for member in self.group.members + ], + } + + def associated_entities(self, member: GroupMember) -> list[GroupEntityReference]: + """Return the list of entities that were derived from this endpoint.""" + entity_registry = er.async_get(self.gateway_proxy.hass) + entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( + self.gateway_proxy.ha_entity_refs + ) + + entity_info = [] + + for entity_ref in entity_refs.get(member.device.ieee): # type: ignore[union-attr] + if not entity_ref.entity_data.is_group_entity: + continue + entity = entity_registry.async_get(entity_ref.ha_entity_id) + + if ( + entity is None + or entity_ref.entity_data.group_proxy is None + or entity_ref.entity_data.group_proxy.group.group_id + != member.group.group_id + ): + continue + + entity_info.append( + GroupEntityReference( + name=entity.name, + original_name=entity.original_name, + entity_id=entity_ref.ha_entity_id, + ) + ) + + return entity_info + + def log(self, level: int, msg: str, *args: Any, **kwargs) -> None: + """Log a message.""" + msg = f"[%s](%s): {msg}" + args = ( + f"0x{self.group.group_id:04x}", + self.group.endpoint.endpoint_id, + *args, + ) + _LOGGER.log(level, msg, *args, **kwargs) + + +class ZHADeviceProxy(EventBase): + """Proxy class to interact with the ZHA device instances.""" + + _ha_device_id: str + + def __init__(self, device: Device, gateway_proxy: ZHAGatewayProxy) -> None: + """Initialize the gateway proxy.""" + super().__init__() + self.device = device + self.gateway_proxy = gateway_proxy + self._unsubs: list[Callable[[], None]] = [] + self._unsubs.append(self.device.on_all_events(self._handle_event_protocol)) + + @property + def device_id(self) -> str: + """Return the HA device registry device id.""" + return self._ha_device_id + + @device_id.setter + def device_id(self, device_id: str) -> None: + """Set the HA device registry device id.""" + self._ha_device_id = device_id + + @property + def device_info(self) -> dict[str, Any]: + """Return a device description for device.""" + ieee = str(self.device.ieee) + time_struct = time.localtime(self.device.last_seen) + update_time = time.strftime("%Y-%m-%dT%H:%M:%S", time_struct) + return { + ATTR_IEEE: ieee, + ATTR_NWK: self.device.nwk, + ATTR_MANUFACTURER: self.device.manufacturer, + ATTR_MODEL: self.device.model, + ATTR_NAME: self.device.name or ieee, + ATTR_QUIRK_APPLIED: self.device.quirk_applied, + ATTR_QUIRK_CLASS: self.device.quirk_class, + ATTR_QUIRK_ID: self.device.quirk_id, + ATTR_MANUFACTURER_CODE: self.device.manufacturer_code, + ATTR_POWER_SOURCE: self.device.power_source, + ATTR_LQI: self.device.lqi, + ATTR_RSSI: self.device.rssi, + 
ATTR_LAST_SEEN: update_time, + ATTR_AVAILABLE: self.device.available, + ATTR_DEVICE_TYPE: self.device.device_type, + ATTR_SIGNATURE: self.device.zigbee_signature, + } + + @property + def zha_device_info(self) -> dict[str, Any]: + """Get ZHA device information.""" + device_info: dict[str, Any] = {} + device_info.update(self.device_info) + device_info[ATTR_ACTIVE_COORDINATOR] = self.device.is_active_coordinator + device_info[ENTITIES] = [ + { + ATTR_ENTITY_ID: entity_ref.ha_entity_id, + ATTR_NAME: entity_ref.ha_device_info[ATTR_NAME], + } + for entity_ref in self.gateway_proxy.ha_entity_refs[self.device.ieee] + ] + + topology = self.gateway_proxy.gateway.application_controller.topology + device_info[ATTR_NEIGHBORS] = [ + { + ATTR_DEVICE_TYPE: neighbor.device_type.name, + RX_ON_WHEN_IDLE: neighbor.rx_on_when_idle.name, + RELATIONSHIP: neighbor.relationship.name, + EXTENDED_PAN_ID: str(neighbor.extended_pan_id), + ATTR_IEEE: str(neighbor.ieee), + ATTR_NWK: str(neighbor.nwk), + PERMIT_JOINING: neighbor.permit_joining.name, + DEPTH: str(neighbor.depth), + ATTR_LQI: str(neighbor.lqi), + } + for neighbor in topology.neighbors[self.device.ieee] + ] + + device_info[ATTR_ROUTES] = [ + { + DEST_NWK: str(route.DstNWK), + ROUTE_STATUS: str(route.RouteStatus.name), + MEMORY_CONSTRAINED: bool(route.MemoryConstrained), + MANY_TO_ONE: bool(route.ManyToOne), + ROUTE_RECORD_REQUIRED: bool(route.RouteRecordRequired), + NEXT_HOP: str(route.NextHop), + } + for route in topology.routes[self.device.ieee] + ] + + # Return endpoint device type Names + names: list[dict[str, str]] = [] + for endpoint in ( + ep for epid, ep in self.device.device.endpoints.items() if epid + ): + profile = PROFILES.get(endpoint.profile_id) + if profile and endpoint.device_type is not None: + # DeviceType provides undefined enums + names.append({ATTR_NAME: profile.DeviceType(endpoint.device_type).name}) + else: + names.append( + { + ATTR_NAME: ( + f"unknown {endpoint.device_type} device_type " + f"of 0x{(endpoint.profile_id or 0xFFFF):04x} profile id" + ) + } + ) + device_info[ATTR_ENDPOINT_NAMES] = names + + device_registry = dr.async_get(self.gateway_proxy.hass) + reg_device = device_registry.async_get(self.device_id) + if reg_device is not None: + device_info[USER_GIVEN_NAME] = reg_device.name_by_user + device_info[DEVICE_REG_ID] = reg_device.id + device_info[ATTR_AREA_ID] = reg_device.area_id + return device_info + + @callback + def handle_zha_event(self, zha_event: ZHAEvent) -> None: + """Handle a ZHA event.""" + self.gateway_proxy.hass.bus.async_fire( + ZHA_EVENT, + { + ATTR_DEVICE_IEEE: str(zha_event.device_ieee), + ATTR_UNIQUE_ID: zha_event.unique_id, + ATTR_DEVICE_ID: self.device_id, + **zha_event.data, + }, + ) + + @callback + def handle_zha_channel_configure_reporting( + self, event: ClusterConfigureReportingEvent + ) -> None: + """Handle a ZHA cluster configure reporting event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_CFG_RPT, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + ATTR_CLUSTER_NAME: event.cluster_name, + ATTR_CLUSTER_ID: event.cluster_id, + ATTR_ATTRIBUTES: event.attributes, + }, + }, + ) + + @callback + def handle_zha_channel_cfg_done( + self, event: ClusterHandlerConfigurationComplete + ) -> None: + """Handle a ZHA cluster configure reporting event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_CFG_DONE, + }, + ) + + @callback + def handle_zha_channel_bind(self, event: 
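`handle_zha_event` is what keeps the long-standing `zha_event` bus event intact after the refactor: the library event is re-fired on the Home Assistant bus with the device IEEE, a unique id and the device registry id merged into the payload. A minimal debugging listener, purely illustrative and not part of the integration, could look like this:

```python
# Minimal, illustrative listener for the `zha_event` fired by handle_zha_event.
# Payload keys mirror the constants used above (device IEEE, unique id, device
# registry id) plus the event-specific data from the zha library.
import logging

from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback

_LOGGER = logging.getLogger(__name__)


@callback
def _log_zha_event(event: Event) -> None:
    """Log every ZHA event seen on the Home Assistant bus."""
    _LOGGER.debug("zha_event from %s: %s", event.data.get("device_ieee"), event.data)


def subscribe_to_zha_events(hass: HomeAssistant) -> CALLBACK_TYPE:
    """Attach the debug listener and return its unsubscribe callback."""
    return hass.bus.async_listen("zha_event", _log_zha_event)
```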
ClusterBindEvent) -> None: + """Handle a ZHA cluster bind event.""" + async_dispatcher_send( + self.gateway_proxy.hass, + ZHA_CLUSTER_HANDLER_MSG, + { + ATTR_TYPE: ZHA_CLUSTER_HANDLER_MSG_BIND, + ZHA_CLUSTER_HANDLER_MSG_DATA: { + ATTR_CLUSTER_NAME: event.cluster_name, + ATTR_CLUSTER_ID: event.cluster_id, + ATTR_SUCCESS: event.success, + }, + }, + ) + + +class EntityReference(NamedTuple): + """Describes an entity reference.""" + + ha_entity_id: str + entity_data: EntityData + ha_device_info: dr.DeviceInfo + remove_future: asyncio.Future[Any] + + +class ZHAGatewayProxy(EventBase): + """Proxy class to interact with the ZHA gateway.""" + + def __init__( + self, hass: HomeAssistant, config_entry: ConfigEntry, gateway: Gateway + ) -> None: + """Initialize the gateway proxy.""" + super().__init__() + self.hass = hass + self.config_entry = config_entry + self.gateway = gateway + self.device_proxies: dict[str, ZHADeviceProxy] = {} + self.group_proxies: dict[int, ZHAGroupProxy] = {} + self._ha_entity_refs: collections.defaultdict[EUI64, list[EntityReference]] = ( + collections.defaultdict(list) + ) + self._log_levels: dict[str, dict[str, int]] = { + DEBUG_LEVEL_ORIGINAL: async_capture_log_levels(), + DEBUG_LEVEL_CURRENT: async_capture_log_levels(), + } + self.debug_enabled: bool = False + self._log_relay_handler: LogRelayHandler = LogRelayHandler(hass, self) + self._unsubs: list[Callable[[], None]] = [] + self._unsubs.append(self.gateway.on_all_events(self._handle_event_protocol)) + self._reload_task: asyncio.Task | None = None + + @property + def ha_entity_refs(self) -> collections.defaultdict[EUI64, list[EntityReference]]: + """Return entities by ieee.""" + return self._ha_entity_refs + + def register_entity_reference( + self, + ha_entity_id: str, + entity_data: EntityData, + ha_device_info: dr.DeviceInfo, + remove_future: asyncio.Future[Any], + ) -> None: + """Record the creation of a hass entity associated with ieee.""" + self._ha_entity_refs[entity_data.device_proxy.device.ieee].append( + EntityReference( + ha_entity_id=ha_entity_id, + entity_data=entity_data, + ha_device_info=ha_device_info, + remove_future=remove_future, + ) + ) + + async def async_initialize_devices_and_entities(self) -> None: + """Initialize devices and entities.""" + for device in self.gateway.devices.values(): + device_proxy = self._async_get_or_create_device_proxy(device) + self._create_entity_metadata(device_proxy) + for group in self.gateway.groups.values(): + group_proxy = self._async_get_or_create_group_proxy(group) + self._create_entity_metadata(group_proxy) + + await self.gateway.async_initialize_devices_and_entities() + + @callback + def handle_connection_lost(self, event: ConnectionLostEvent) -> None: + """Handle a connection lost event.""" + + _LOGGER.debug("Connection to the radio was lost: %r", event) + + # Ensure we do not queue up multiple resets + if self._reload_task is not None: + _LOGGER.debug("Ignoring reset, one is already running") + return + + self._reload_task = self.hass.async_create_task( + self.hass.config_entries.async_reload(self.config_entry.entry_id), + ) + + @callback + def handle_device_joined(self, event: DeviceJoinedEvent) -> None: + """Handle a device joined event.""" + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_JOINED, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: event.device_info.nwk, + ATTR_IEEE: str(event.device_info.ieee), + DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, + }, + }, + ) + + @callback + def handle_device_removed(self, 
event: DeviceRemovedEvent) -> None: + """Handle a device removed event.""" + zha_device_proxy = self.device_proxies.pop(event.device_info.ieee, None) + entity_refs = self._ha_entity_refs.pop(event.device_info.ieee, None) + if zha_device_proxy is not None: + device_info = zha_device_proxy.zha_device_info + # zha_device_proxy.async_cleanup_handles() + async_dispatcher_send( + self.hass, + f"{SIGNAL_REMOVE_ENTITIES}_{zha_device_proxy.device.ieee!s}", + ) + self.hass.async_create_task( + self._async_remove_device(zha_device_proxy, entity_refs), + "ZHAGateway._async_remove_device", + ) + if device_info is not None: + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_REMOVED, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + @callback + def handle_device_left(self, event: DeviceLeftEvent) -> None: + """Handle a device left event.""" + + @callback + def handle_raw_device_initialized(self, event: RawDeviceInitializedEvent) -> None: + """Handle a raw device initialized event.""" + manuf = event.device_info.manufacturer + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_RAW_INIT, + ZHA_GW_MSG_DEVICE_INFO: { + ATTR_NWK: str(event.device_info.nwk), + ATTR_IEEE: str(event.device_info.ieee), + DEVICE_PAIRING_STATUS: event.device_info.pairing_status.name, + ATTR_MODEL: event.device_info.model + if event.device_info.model + else UNKNOWN_MODEL, + ATTR_MANUFACTURER: manuf if manuf else UNKNOWN_MANUFACTURER, + ATTR_SIGNATURE: event.device_info.signature, + }, + }, + ) + + @callback + def handle_device_fully_initialized(self, event: DeviceFullInitEvent) -> None: + """Handle a device fully initialized event.""" + zha_device = self.gateway.get_device(event.device_info.ieee) + zha_device_proxy = self._async_get_or_create_device_proxy(zha_device) + + device_info = zha_device_proxy.zha_device_info + device_info[DEVICE_PAIRING_STATUS] = event.device_info.pairing_status.name + if event.new_join: + self._create_entity_metadata(zha_device_proxy) + async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: ZHA_GW_MSG_DEVICE_FULL_INIT, + ZHA_GW_MSG_DEVICE_INFO: device_info, + }, + ) + + @callback + def handle_group_member_removed(self, event: GroupEvent) -> None: + """Handle a group member removed event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_member_removed - group_info: %s", event.group_info) + self._update_group_entities(event) + self._send_group_gateway_message( + zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_REMOVED + ) + + @callback + def handle_group_member_added(self, event: GroupEvent) -> None: + """Handle a group member added event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_member_added - group_info: %s", event.group_info) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_MEMBER_ADDED) + self._update_group_entities(event) + + @callback + def handle_group_added(self, event: GroupEvent) -> None: + """Handle a group added event.""" + zha_group_proxy = self._async_get_or_create_group_proxy(event.group_info) + zha_group_proxy.info("group_added") + self._update_group_entities(event) + self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_ADDED) + + @callback + def handle_group_removed(self, event: GroupEvent) -> None: + """Handle a group removed event.""" + zha_group_proxy = self.group_proxies.pop(event.group_info.group_id) + 
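Device removal is a two-step handshake: the dispatcher signal tells every entity registered for that IEEE to remove itself (each one resolves its `remove_future` at the end of `async_will_remove_from_hass`), and the `_async_remove_device` task created above waits on those futures before dropping the device registry entry (its body appears further below). A stripped-down sketch of the same pattern, with generic names:

```python
# Stripped-down sketch of the remove-signal / remove-future handshake used when
# a device leaves. Names are generic; the real code uses SIGNAL_REMOVE_ENTITIES
# and the EntityReference.remove_future recorded at entity registration time.
import asyncio
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_send


async def remove_device_entities(
    hass: HomeAssistant,
    remove_signal: str,
    remove_futures: list[asyncio.Future[Any]],
) -> None:
    """Ask the device's entities to remove themselves, then wait for them."""
    # Each subscribed entity calls async_remove(force_remove=True) and resolves
    # its future once async_will_remove_from_hass has run.
    async_dispatcher_send(hass, remove_signal)
    if remove_futures:
        await asyncio.wait(remove_futures)
    # Only at this point is it safe to delete the device registry entry.
```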
self._send_group_gateway_message(zha_group_proxy, ZHA_GW_MSG_GROUP_REMOVED) + zha_group_proxy.info("group_removed") + self._cleanup_group_entity_registry_entries(zha_group_proxy) + + @callback + def async_enable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Enable debug mode for ZHA.""" + self._log_levels[DEBUG_LEVEL_ORIGINAL] = async_capture_log_levels() + async_set_logger_levels(DEBUG_LEVELS) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + + if filterer: + self._log_relay_handler.addFilter(filterer) + + for logger_name in DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).addHandler(self._log_relay_handler) + + self.debug_enabled = True + + @callback + def async_disable_debug_mode(self, filterer: _LogFilterType | None = None) -> None: + """Disable debug mode for ZHA.""" + async_set_logger_levels(self._log_levels[DEBUG_LEVEL_ORIGINAL]) + self._log_levels[DEBUG_LEVEL_CURRENT] = async_capture_log_levels() + for logger_name in DEBUG_RELAY_LOGGERS: + logging.getLogger(logger_name).removeHandler(self._log_relay_handler) + if filterer: + self._log_relay_handler.removeFilter(filterer) + self.debug_enabled = False + + async def shutdown(self) -> None: + """Shutdown the gateway proxy.""" + for unsub in self._unsubs: + unsub() + await self.gateway.shutdown() + + def get_device_proxy(self, ieee: EUI64) -> ZHADeviceProxy | None: + """Return ZHADevice for given ieee.""" + return self.device_proxies.get(ieee) + + def get_group_proxy(self, group_id: int | str) -> ZHAGroupProxy | None: + """Return Group for given group id.""" + if isinstance(group_id, str): + for group_proxy in self.group_proxies.values(): + if group_proxy.group.name == group_id: + return group_proxy + return None + return self.group_proxies.get(group_id) + + def get_entity_reference(self, entity_id: str) -> EntityReference | None: + """Return entity reference for given entity_id if found.""" + for entity_reference in itertools.chain.from_iterable( + self.ha_entity_refs.values() + ): + if entity_id == entity_reference.ha_entity_id: + return entity_reference + return None + + def remove_entity_reference(self, entity: ZHAEntity) -> None: + """Remove entity reference for given entity_id if found.""" + if entity.zha_device.ieee in self.ha_entity_refs: + entity_refs = self.ha_entity_refs.get(entity.zha_device.ieee) + self.ha_entity_refs[entity.zha_device.ieee] = [ + e + for e in entity_refs # type: ignore[union-attr] + if e.ha_entity_id != entity.entity_id + ] + + def _async_get_or_create_device_proxy(self, zha_device: Device) -> ZHADeviceProxy: + """Get or create a ZHA device.""" + if (zha_device_proxy := self.device_proxies.get(zha_device.ieee)) is None: + zha_device_proxy = ZHADeviceProxy(zha_device, self) + self.device_proxies[zha_device_proxy.device.ieee] = zha_device_proxy + + device_registry = dr.async_get(self.hass) + device_registry_device = device_registry.async_get_or_create( + config_entry_id=self.config_entry.entry_id, + connections={(dr.CONNECTION_ZIGBEE, str(zha_device.ieee))}, + identifiers={(DOMAIN, str(zha_device.ieee))}, + name=zha_device.name, + manufacturer=zha_device.manufacturer, + model=zha_device.model, + ) + zha_device_proxy.device_id = device_registry_device.id + return zha_device_proxy + + def _async_get_or_create_group_proxy(self, group_info: GroupInfo) -> ZHAGroupProxy: + """Get or create a ZHA group.""" + zha_group_proxy = self.group_proxies.get(group_info.group_id) + if zha_group_proxy is None: + zha_group_proxy = ZHAGroupProxy( + 
self.gateway.groups[group_info.group_id], self + ) + self.group_proxies[group_info.group_id] = zha_group_proxy + return zha_group_proxy + + def _create_entity_metadata( + self, proxy_object: ZHADeviceProxy | ZHAGroupProxy + ) -> None: + """Create HA entity metadata.""" + ha_zha_data = get_zha_data(self.hass) + coordinator_proxy = self.device_proxies[ + self.gateway.coordinator_zha_device.ieee + ] + + if isinstance(proxy_object, ZHADeviceProxy): + for entity in proxy_object.device.platform_entities.values(): + ha_zha_data.platforms[Platform(entity.PLATFORM)].append( + EntityData( + entity=entity, device_proxy=proxy_object, group_proxy=None + ) + ) + else: + for entity in proxy_object.group.group_entities.values(): + ha_zha_data.platforms[Platform(entity.PLATFORM)].append( + EntityData( + entity=entity, + device_proxy=coordinator_proxy, + group_proxy=proxy_object, + ) + ) + + def _cleanup_group_entity_registry_entries( + self, zigpy_group: zigpy.group.Group + ) -> None: + """Remove entity registry entries for group entities when the groups are removed from HA.""" + # first we collect the potential unique ids for entities that could be created from this group + possible_entity_unique_ids = [ + f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + for domain in GROUP_ENTITY_DOMAINS + ] + + # then we get all group entity entries tied to the coordinator + entity_registry = er.async_get(self.hass) + assert self.coordinator_zha_device + all_group_entity_entries = er.async_entries_for_device( + entity_registry, + self.coordinator_zha_device.device_id, + include_disabled_entities=True, + ) + + # then we get the entity entries for this specific group + # by getting the entries that match + entries_to_remove = [ + entry + for entry in all_group_entity_entries + if entry.unique_id in possible_entity_unique_ids + ] + + # then we remove the entries from the entity registry + for entry in entries_to_remove: + _LOGGER.debug( + "cleaning up entity registry entry for entity: %s", entry.entity_id + ) + entity_registry.async_remove(entry.entity_id) + + def _update_group_entities(self, group_event: GroupEvent) -> None: + """Update group entities when a group event is received.""" + async_dispatcher_send( + self.hass, + f"{SIGNAL_REMOVE_ENTITIES}_group_{group_event.group_info.group_id}", + ) + self._create_entity_metadata( + self.group_proxies[group_event.group_info.group_id] + ) + async_dispatcher_send(self.hass, SIGNAL_ADD_ENTITIES) + + def _send_group_gateway_message( + self, zha_group_proxy: ZHAGroupProxy, gateway_message_type: str + ) -> None: + """Send the gateway event for a zigpy group event.""" + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + { + ATTR_TYPE: gateway_message_type, + ZHA_GW_MSG_GROUP_INFO: zha_group_proxy.group_info, + }, + ) + + async def _async_remove_device( + self, device: ZHADeviceProxy, entity_refs: list[EntityReference] | None + ) -> None: + if entity_refs is not None: + remove_tasks: list[asyncio.Future[Any]] = [ + entity_ref.remove_future for entity_ref in entity_refs + ] + if remove_tasks: + await asyncio.wait(remove_tasks) + + device_registry = dr.async_get(self.hass) + reg_device = device_registry.async_get(device.device_id) + if reg_device is not None: + device_registry.async_remove_device(reg_device.id) + + +@callback +def async_capture_log_levels() -> dict[str, int]: + """Capture current logger levels for ZHA.""" + return { + DEBUG_COMP_BELLOWS: logging.getLogger(DEBUG_COMP_BELLOWS).getEffectiveLevel(), + DEBUG_COMP_ZHA: 
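Group entity cleanup relies purely on a naming convention: group entities derive their unique ids from the HA domain and the Zigbee group id, so removing a group means deleting any registry entry whose unique id falls in a small, predictable set. A worked example for group `0x0002`, using the string values of the `Platform` members listed in `GROUP_ENTITY_DOMAINS`:

```python
# Worked example of the unique_id convention relied on by
# _cleanup_group_entity_registry_entries for Zigbee group 0x0002.
GROUP_ENTITY_DOMAINS = ["light", "switch", "fan"]

group_id = 0x0002
possible_entity_unique_ids = [
    f"{domain}_zha_group_0x{group_id:04x}" for domain in GROUP_ENTITY_DOMAINS
]
assert possible_entity_unique_ids == [
    "light_zha_group_0x0002",
    "switch_zha_group_0x0002",
    "fan_zha_group_0x0002",
]
```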
logging.getLogger(DEBUG_COMP_ZHA).getEffectiveLevel(), + DEBUG_COMP_ZIGPY: logging.getLogger(DEBUG_COMP_ZIGPY).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZNP: logging.getLogger( + DEBUG_COMP_ZIGPY_ZNP + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_DECONZ: logging.getLogger( + DEBUG_COMP_ZIGPY_DECONZ + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_XBEE: logging.getLogger( + DEBUG_COMP_ZIGPY_XBEE + ).getEffectiveLevel(), + DEBUG_COMP_ZIGPY_ZIGATE: logging.getLogger( + DEBUG_COMP_ZIGPY_ZIGATE + ).getEffectiveLevel(), + DEBUG_LIB_ZHA: logging.getLogger(DEBUG_LIB_ZHA).getEffectiveLevel(), + } + + +@callback +def async_set_logger_levels(levels: dict[str, int]) -> None: + """Set logger levels for ZHA.""" + logging.getLogger(DEBUG_COMP_BELLOWS).setLevel(levels[DEBUG_COMP_BELLOWS]) + logging.getLogger(DEBUG_COMP_ZHA).setLevel(levels[DEBUG_COMP_ZHA]) + logging.getLogger(DEBUG_COMP_ZIGPY).setLevel(levels[DEBUG_COMP_ZIGPY]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZNP).setLevel(levels[DEBUG_COMP_ZIGPY_ZNP]) + logging.getLogger(DEBUG_COMP_ZIGPY_DECONZ).setLevel(levels[DEBUG_COMP_ZIGPY_DECONZ]) + logging.getLogger(DEBUG_COMP_ZIGPY_XBEE).setLevel(levels[DEBUG_COMP_ZIGPY_XBEE]) + logging.getLogger(DEBUG_COMP_ZIGPY_ZIGATE).setLevel(levels[DEBUG_COMP_ZIGPY_ZIGATE]) + logging.getLogger(DEBUG_LIB_ZHA).setLevel(levels[DEBUG_LIB_ZHA]) + + +class LogRelayHandler(logging.Handler): + """Log handler for error messages.""" + + def __init__(self, hass: HomeAssistant, gateway: ZHAGatewayProxy) -> None: + """Initialize a new LogErrorHandler.""" + super().__init__() + self.hass = hass + self.gateway = gateway + hass_path: str = HOMEASSISTANT_PATH[0] + config_dir = self.hass.config.config_dir + self.paths_re = re.compile( + r"(?:{})/(.*)".format( + "|".join([re.escape(x) for x in (hass_path, config_dir)]) + ) + ) + + def emit(self, record: LogRecord) -> None: + """Relay log message via dispatcher.""" + entry = LogEntry( + record, self.paths_re, figure_out_source=record.levelno >= logging.WARNING + ) + async_dispatcher_send( + self.hass, + ZHA_GW_MSG, + {ATTR_TYPE: ZHA_GW_MSG_LOG_OUTPUT, ZHA_GW_MSG_LOG_ENTRY: entry.to_dict()}, + ) + + +@dataclasses.dataclass(kw_only=True, slots=True) +class HAZHAData: + """ZHA data stored in `hass.data`.""" + + yaml_config: ConfigType = dataclasses.field(default_factory=dict) + config_entry: ConfigEntry | None = dataclasses.field(default=None) + device_trigger_cache: dict[str, tuple[str, dict]] = dataclasses.field( + default_factory=dict + ) + gateway_proxy: ZHAGatewayProxy | None = dataclasses.field(default=None) + platforms: collections.defaultdict[Platform, list] = dataclasses.field( + default_factory=lambda: collections.defaultdict(list) + ) + update_coordinator: ZHAFirmwareUpdateCoordinator | None = dataclasses.field( + default=None + ) + + +@dataclasses.dataclass(kw_only=True, slots=True) +class EntityData: + """ZHA entity data.""" + + entity: PlatformEntity | GroupEntity + device_proxy: ZHADeviceProxy + group_proxy: ZHAGroupProxy | None = dataclasses.field(default=None) + + @property + def is_group_entity(self) -> bool: + """Return if this is a group entity.""" + return self.group_proxy is not None and isinstance(self.entity, GroupEntity) + + +def get_zha_data(hass: HomeAssistant) -> HAZHAData: + """Get the global ZHA data object.""" + if DATA_ZHA not in hass.data: + hass.data[DATA_ZHA] = HAZHAData() + + return hass.data[DATA_ZHA] + + +def get_zha_gateway(hass: HomeAssistant) -> Gateway: + """Get the ZHA gateway object.""" + if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: + raise 
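Debug mode is reversible because the original logger levels are snapshotted before being forced to `DEBUG` and restored afterwards, with the relay handler attached only to the loggers in `DEBUG_RELAY_LOGGERS`. The snapshot and restore idea is plain `logging` usage, reduced here to a single logger:

```python
# Minimal sketch of the capture/override/restore pattern behind ZHA debug mode,
# reduced to one logger. The real code snapshots every logger in DEBUG_LEVELS
# and also attaches/detaches the LogRelayHandler.
import logging

logger = logging.getLogger("zigpy")

original_level = logger.getEffectiveLevel()  # capture (async_capture_log_levels)
logger.setLevel(logging.DEBUG)               # enable debug mode (async_set_logger_levels)
# ... debug session runs, records are relayed to the frontend ...
logger.setLevel(original_level)              # disabling debug mode restores the snapshot
```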
ValueError("No gateway object exists") + + return gateway_proxy.gateway + + +def get_zha_gateway_proxy(hass: HomeAssistant) -> ZHAGatewayProxy: + """Get the ZHA gateway object.""" + if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: + raise ValueError("No gateway object exists") + + return gateway_proxy + + +def get_config_entry(hass: HomeAssistant) -> ConfigEntry: + """Get the ZHA gateway object.""" + if (gateway_proxy := get_zha_data(hass).gateway_proxy) is None: + raise ValueError("No gateway object exists to retrieve the config entry from.") + + return gateway_proxy.config_entry + + +@callback +def async_get_zha_device_proxy(hass: HomeAssistant, device_id: str) -> ZHADeviceProxy: + """Get a ZHA device for the given device registry id.""" + device_registry = dr.async_get(hass) + registry_device = device_registry.async_get(device_id) + if not registry_device: + _LOGGER.error("Device id `%s` not found in registry", device_id) + raise KeyError(f"Device id `{device_id}` not found in registry.") + zha_gateway_proxy = get_zha_gateway_proxy(hass) + try: + ieee_address = list(registry_device.identifiers)[0][1] + ieee = EUI64.convert(ieee_address) + except (IndexError, ValueError) as ex: + _LOGGER.error( + "Unable to determine device IEEE for device with device id `%s`", device_id + ) + raise KeyError( + f"Unable to determine device IEEE for device with device id `{device_id}`." + ) from ex + return zha_gateway_proxy.device_proxies[ieee] + + +def cluster_command_schema_to_vol_schema(schema: CommandSchema) -> vol.Schema: + """Convert a cluster command schema to a voluptuous schema.""" + return vol.Schema( + { + vol.Optional(field.name) + if field.optional + else vol.Required(field.name): schema_type_to_vol(field.type) + for field in schema.fields + } + ) + + +def schema_type_to_vol(field_type: Any) -> Any: + """Convert a schema type to a voluptuous type.""" + if issubclass(field_type, enum.Flag) and field_type.__members__: + return cv.multi_select( + [key.replace("_", " ") for key in field_type.__members__] + ) + if issubclass(field_type, enum.Enum) and field_type.__members__: + return vol.In([key.replace("_", " ") for key in field_type.__members__]) + if ( + issubclass(field_type, zigpy.types.FixedIntType) + or issubclass(field_type, enum.Flag) + or issubclass(field_type, enum.Enum) + ): + return vol.All( + vol.Coerce(int), vol.Range(field_type.min_value, field_type.max_value) + ) + return str + + +def convert_to_zcl_values( + fields: dict[str, Any], schema: CommandSchema +) -> dict[str, Any]: + """Convert user input to ZCL values.""" + converted_fields: dict[str, Any] = {} + for field in schema.fields: + if field.name not in fields: + continue + value = fields[field.name] + if issubclass(field.type, enum.Flag) and isinstance(value, list): + new_value = 0 + + for flag in value: + if isinstance(flag, str): + new_value |= field.type[flag.replace(" ", "_")] + else: + new_value |= flag + + value = field.type(new_value) + elif issubclass(field.type, enum.Enum): + value = ( + field.type[value.replace(" ", "_")] + if isinstance(value, str) + else field.type(value) + ) + else: + value = field.type(value) + _LOGGER.debug( + "Converted ZCL schema field(%s) value from: %s to: %s", + field.name, + fields[field.name], + value, + ) + converted_fields[field.name] = value + return converted_fields + + +def async_cluster_exists(hass: HomeAssistant, cluster_id, skip_coordinator=True): + """Determine if a device containing the specified in cluster is paired.""" + zha_gateway = get_zha_gateway(hass) + 
zha_devices = zha_gateway.devices.values() + for zha_device in zha_devices: + if skip_coordinator and zha_device.is_coordinator: + continue + clusters_by_endpoint = zha_device.async_get_clusters() + for clusters in clusters_by_endpoint.values(): + if ( + cluster_id in clusters[CLUSTER_TYPE_IN] + or cluster_id in clusters[CLUSTER_TYPE_OUT] + ): + return True + return False + + +@callback +async def async_add_entities( + _async_add_entities: AddEntitiesCallback, + entity_class: type[ZHAEntity], + entities: list[EntityData], + **kwargs, +) -> None: + """Add entities helper.""" + if not entities: + return + + entities_to_add = [] + for entity_data in entities: + try: + entities_to_add.append(entity_class(entity_data)) + # broad exception to prevent a single entity from preventing an entire platform from loading + # this can potentially be caused by a misbehaving device or a bad quirk. Not ideal but the + # alternative is adding try/catch to each entity class __init__ method with a specific exception + except Exception: # noqa: BLE001 + _LOGGER.exception( + "Error while adding entity from entity data: %s", entity_data + ) + _async_add_entities(entities_to_add, update_before_add=False) + entities.clear() + + +def _clean_serial_port_path(path: str) -> str: + """Clean the serial port path, applying corrections where necessary.""" + + if path.startswith("socket://"): + path = path.strip() + + # Removes extraneous brackets from IP addresses (they don't parse in CPython 3.11.4) + if re.match(r"^socket://\[\d+\.\d+\.\d+\.\d+\]:\d+$", path): + path = path.replace("[", "").replace("]", "") + + return path + + +CONF_ZHA_OPTIONS_SCHEMA = vol.Schema( + { + vol.Optional(CONF_DEFAULT_LIGHT_TRANSITION, default=0): vol.All( + vol.Coerce(float), vol.Range(min=0, max=2**16 / 10) + ), + vol.Required(CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, default=False): cv.boolean, + vol.Required(CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, default=True): cv.boolean, + vol.Required(CONF_ALWAYS_PREFER_XY_COLOR_MODE, default=True): cv.boolean, + vol.Required(CONF_GROUP_MEMBERS_ASSUME_STATE, default=True): cv.boolean, + vol.Required(CONF_ENABLE_IDENTIFY_ON_JOIN, default=True): cv.boolean, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_MAINS, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, + ): cv.positive_int, + vol.Optional( + CONF_CONSIDER_UNAVAILABLE_BATTERY, + default=CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, + ): cv.positive_int, + } +) + +CONF_ZHA_ALARM_SCHEMA = vol.Schema( + { + vol.Required(CONF_ALARM_MASTER_CODE, default="1234"): cv.string, + vol.Required(CONF_ALARM_FAILED_TRIES, default=3): cv.positive_int, + vol.Required(CONF_ALARM_ARM_REQUIRES_CODE, default=False): cv.boolean, + } +) + + +def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: + """Create ZHA lib configuration from HA config objects.""" + + # ensure that we have the necessary HA configuration data + assert ha_zha_data.config_entry is not None + assert ha_zha_data.yaml_config is not None + + # Remove brackets around IP addresses, this no longer works in CPython 3.11.4 + # This will be removed in 2023.11.0 + path = ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] + cleaned_path = _clean_serial_port_path(path) + + if path != cleaned_path: + _LOGGER.debug("Cleaned serial port path %r -> %r", path, cleaned_path) + ha_zha_data.config_entry.data[CONF_DEVICE][CONF_DEVICE_PATH] = cleaned_path + hass.config_entries.async_update_entry( + ha_zha_data.config_entry, data=ha_zha_data.config_entry.data + ) + + # deep copy the yaml config to 
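`_clean_serial_port_path` only touches `socket://` URIs: it strips stray whitespace and removes the square brackets some older config entries stored around a literal IPv4 address, since bracketed addresses no longer parse on current CPython. Worked examples based on the implementation above:

```python
# Worked examples for _clean_serial_port_path (behaviour as shown above).
assert _clean_serial_port_path("socket://[192.168.1.50]:6638") == "socket://192.168.1.50:6638"
assert _clean_serial_port_path("socket://192.168.1.50:6638 ") == "socket://192.168.1.50:6638"
assert _clean_serial_port_path("/dev/ttyUSB0") == "/dev/ttyUSB0"  # non-socket paths pass through
```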
avoid modifying the original and to safely + # pass it to the ZHA library + app_config = copy.deepcopy(ha_zha_data.yaml_config.get(CONF_ZIGPY, {})) + database = app_config.get( + CONF_DATABASE, + hass.config.path(DEFAULT_DATABASE_NAME), + ) + app_config[CONF_DATABASE] = database + app_config[CONF_DEVICE] = ha_zha_data.config_entry.data[CONF_DEVICE] + + radio_type = RadioType[ha_zha_data.config_entry.data[CONF_RADIO_TYPE]] + + # Until we have a way to coordinate channels with the Thread half of multi-PAN, + # stick to the old zigpy default of channel 15 instead of dynamically scanning + if ( + is_multiprotocol_url(app_config[CONF_DEVICE][CONF_DEVICE_PATH]) + and app_config.get(CONF_NWK, {}).get(CONF_NWK_CHANNEL) is None + ): + app_config.setdefault(CONF_NWK, {})[CONF_NWK_CHANNEL] = 15 + + options: MappingProxyType[str, Any] = ha_zha_data.config_entry.options.get( + CUSTOM_CONFIGURATION, {} + ) + zha_options = CONF_ZHA_OPTIONS_SCHEMA(options.get(ZHA_OPTIONS, {})) + ha_acp_options = CONF_ZHA_ALARM_SCHEMA(options.get(ZHA_ALARM_OPTIONS, {})) + light_options: LightOptions = LightOptions( + default_light_transition=zha_options.get(CONF_DEFAULT_LIGHT_TRANSITION), + enable_enhanced_light_transition=zha_options.get( + CONF_ENABLE_ENHANCED_LIGHT_TRANSITION + ), + enable_light_transitioning_flag=zha_options.get( + CONF_ENABLE_LIGHT_TRANSITIONING_FLAG + ), + always_prefer_xy_color_mode=zha_options.get(CONF_ALWAYS_PREFER_XY_COLOR_MODE), + group_members_assume_state=zha_options.get(CONF_GROUP_MEMBERS_ASSUME_STATE), + ) + device_options: DeviceOptions = DeviceOptions( + enable_identify_on_join=zha_options.get(CONF_ENABLE_IDENTIFY_ON_JOIN), + consider_unavailable_mains=zha_options.get(CONF_CONSIDER_UNAVAILABLE_MAINS), + consider_unavailable_battery=zha_options.get(CONF_CONSIDER_UNAVAILABLE_BATTERY), + ) + acp_options: AlarmControlPanelOptions = AlarmControlPanelOptions( + master_code=ha_acp_options.get(CONF_ALARM_MASTER_CODE), + failed_tries=ha_acp_options.get(CONF_ALARM_FAILED_TRIES), + arm_requires_code=ha_acp_options.get(CONF_ALARM_ARM_REQUIRES_CODE), + ) + coord_config: CoordinatorConfiguration = CoordinatorConfiguration( + path=app_config[CONF_DEVICE][CONF_DEVICE_PATH], + baudrate=app_config[CONF_DEVICE][CONF_BAUDRATE], + flow_control=app_config[CONF_DEVICE][CONF_FLOW_CONTROL], + radio_type=radio_type.name, + ) + quirks_config: QuirksConfiguration = QuirksConfiguration( + enabled=ha_zha_data.yaml_config.get(CONF_ENABLE_QUIRKS, True), + custom_quirks_path=ha_zha_data.yaml_config.get(CONF_CUSTOM_QUIRKS_PATH), + ) + overrides_config: dict[str, DeviceOverridesConfiguration] = {} + overrides: dict[str, dict[str, Any]] = cast( + dict[str, dict[str, Any]], ha_zha_data.yaml_config.get(CONF_DEVICE_CONFIG) + ) + if overrides is not None: + for unique_id, override in overrides.items(): + overrides_config[unique_id] = DeviceOverridesConfiguration( + type=override["type"], + ) + + return ZHAData( + zigpy_config=app_config, + config=ZHAConfiguration( + light_options=light_options, + device_options=device_options, + alarm_control_panel_options=acp_options, + coordinator_configuration=coord_config, + quirks_configuration=quirks_config, + device_overrides=overrides_config, + ), + ) + + +def convert_zha_error_to_ha_error( + func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Decorate ZHA commands and re-raises ZHAException as HomeAssistantError.""" + + @functools.wraps(func) + async def handler(self: _EntityT, *args: _P.args, **kwargs: 
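A side note on the option handling above: passing the (possibly empty) stored options through the voluptuous schemas is what guarantees every key is present afterwards, so create_zha_config can use plain .get() lookups. A minimal sketch of that defaulting behaviour, with plain string keys standing in for the CONF_* constants:

import voluptuous as vol

# String keys stand in for the CONF_* constants used by CONF_ZHA_OPTIONS_SCHEMA.
OPTIONS_SCHEMA = vol.Schema(
    {
        vol.Optional("default_light_transition", default=0): vol.Coerce(float),
        vol.Required("enable_identify_on_join", default=True): bool,
        vol.Required("group_members_assume_state", default=True): bool,
    }
)

# Validation fills in defaults for any omitted keys.
validated = OPTIONS_SCHEMA({"group_members_assume_state": False})
assert validated["enable_identify_on_join"] is True
assert validated["group_members_assume_state"] is False
assert "default_light_transition" in validated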
_P.kwargs) -> None: + try: + return await func(self, *args, **kwargs) + except ZHAException as err: + raise HomeAssistantError(err) from err + + return handler + + +def exclude_none_values(obj: Mapping[str, Any]) -> dict[str, Any]: + """Return a new dictionary excluding keys with None values.""" + return {k: v for k, v in obj.items() if v is not None} diff --git a/homeassistant/components/zha/light.py b/homeassistant/components/zha/light.py index 6fd08de889f..4a36030a0dd 100644 --- a/homeassistant/components/zha/light.py +++ b/homeassistant/components/zha/light.py @@ -2,93 +2,63 @@ from __future__ import annotations -from collections import Counter -from collections.abc import Callable -from datetime import timedelta +from collections.abc import Mapping import functools -import itertools import logging -import random -from typing import TYPE_CHECKING, Any +from typing import Any -from zigpy.zcl.clusters.general import Identify, LevelControl, OnOff -from zigpy.zcl.clusters.lighting import Color -from zigpy.zcl.foundation import Status +from zha.application.platforms.light.const import ( + ColorMode as ZhaColorMode, + LightEntityFeature as ZhaLightEntityFeature, +) -from homeassistant.components import light from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_EFFECT, + ATTR_FLASH, + ATTR_HS_COLOR, + ATTR_TRANSITION, + ATTR_XY_COLOR, ColorMode, + LightEntity, LightEntityFeature, - brightness_supported, - filter_supported_color_modes, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_SUPPORTED_FEATURES, - STATE_ON, - STATE_UNAVAILABLE, - Platform, -) -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback -from homeassistant.helpers.debounce import Debouncer -from homeassistant.helpers.dispatcher import ( - async_dispatcher_connect, - async_dispatcher_send, -) +from homeassistant.const import STATE_ON, Platform +from homeassistant.core import HomeAssistant, State, callback +from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later, async_track_time_interval -from .core import discovery, helpers -from .core.const import ( - CLUSTER_HANDLER_COLOR, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_ON_OFF, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - CONF_DEFAULT_LIGHT_TRANSITION, - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - CONF_GROUP_MEMBERS_ASSUME_STATE, - DATA_ZHA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - SIGNAL_SET_LEVEL, - ZHA_OPTIONS, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import LogMixin, async_get_zha_config_value, get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity -if TYPE_CHECKING: - from .core.device import ZHADevice - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_ON_OFF_TRANSITION = 1 # most bulbs default to a 1-second turn on/off transition -DEFAULT_EXTRA_TRANSITION_DELAY_SHORT = 0.25 -DEFAULT_EXTRA_TRANSITION_DELAY_LONG = 2.0 -DEFAULT_LONG_TRANSITION_TIME = 10 -DEFAULT_MIN_BRIGHTNESS = 2 -ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY = 0.05 - -FLASH_EFFECTS = { - light.FLASH_SHORT: Identify.EffectIdentifier.Blink, - light.FLASH_LONG: Identify.EffectIdentifier.Breathe, +ZHA_TO_HA_COLOR_MODE = { + 
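The decorator and exclude_none_values helper above are small enough to demonstrate standalone. This sketch uses stand-in exception classes in place of the real ZHAException and HomeAssistantError, but the wrap-and-re-raise shape is the same:

import asyncio
import functools

class ZHAException(Exception):
    """Stand-in for the zha library's ZHAException."""

class HomeAssistantError(Exception):
    """Stand-in for homeassistant.exceptions.HomeAssistantError."""

def convert_zha_error_to_ha_error(func):
    # Same behaviour as the helper above, minus the typing machinery: any
    # ZHAException raised by the wrapped coroutine surfaces as HomeAssistantError.
    @functools.wraps(func)
    async def handler(self, *args, **kwargs):
        try:
            return await func(self, *args, **kwargs)
        except ZHAException as err:
            raise HomeAssistantError(err) from err
    return handler

class DemoEntity:
    """Hypothetical entity used only for this demonstration."""

    @convert_zha_error_to_ha_error
    async def async_lock(self) -> None:
        raise ZHAException("lock command failed")

def exclude_none_values(obj):
    # Same as the helper above: drop keys whose value is None.
    return {k: v for k, v in obj.items() if v is not None}

async def main() -> None:
    try:
        await DemoEntity().async_lock()
    except HomeAssistantError as err:
        print(f"re-raised for the frontend: {err}")
    assert exclude_none_values({"brightness": 128, "effect": None}) == {"brightness": 128}

asyncio.run(main())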
ZhaColorMode.UNKNOWN: ColorMode.UNKNOWN, + ZhaColorMode.ONOFF: ColorMode.ONOFF, + ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS, + ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP, + ZhaColorMode.HS: ColorMode.HS, + ZhaColorMode.XY: ColorMode.XY, + ZhaColorMode.RGB: ColorMode.RGB, + ZhaColorMode.RGBW: ColorMode.RGBW, + ZhaColorMode.RGBWW: ColorMode.RGBWW, + ZhaColorMode.WHITE: ColorMode.WHITE, } -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.LIGHT) -GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.LIGHT) -SIGNAL_LIGHT_GROUP_STATE_CHANGED = "zha_light_group_state_changed" -SIGNAL_LIGHT_GROUP_TRANSITION_START = "zha_light_group_transition_start" -SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED = "zha_light_group_transition_finished" -SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE = "zha_light_group_assume_group_state" -DEFAULT_MIN_TRANSITION_MANUFACTURERS = {"sengled"} +HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()} -COLOR_MODES_GROUP_LIGHT = {ColorMode.COLOR_TEMP, ColorMode.XY} -SUPPORT_GROUP_LIGHT = ( - light.LightEntityFeature.EFFECT - | light.LightEntityFeature.FLASH - | light.LightEntityFeature.TRANSITION -) +OFF_BRIGHTNESS = "off_brightness" +OFF_WITH_TRANSITION = "off_with_transition" + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -104,1280 +74,144 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Light, entities_to_create ), ) config_entry.async_on_unload(unsub) -class BaseLight(LogMixin, light.LightEntity): - """Operations common to all light entities.""" +class Light(LightEntity, ZHAEntity): + """Representation of a ZHA or ZLL light.""" - _FORCE_ON = False - _DEFAULT_MIN_TRANSITION_TIME: float = 0 + def __init__(self, entity_data: EntityData) -> None: + """Initialize the ZHA light.""" + super().__init__(entity_data) + color_modes: set[ColorMode] = set() + has_brightness = False + for color_mode in self.entity_data.entity.supported_color_modes: + if color_mode == ZhaColorMode.BRIGHTNESS: + has_brightness = True + if color_mode not in (ZhaColorMode.BRIGHTNESS, ZhaColorMode.ONOFF): + color_modes.add(ZHA_TO_HA_COLOR_MODE[color_mode]) + if color_modes: + self._attr_supported_color_modes = color_modes + elif has_brightness: + color_modes.add(ColorMode.BRIGHTNESS) + self._attr_supported_color_modes = color_modes + else: + color_modes.add(ColorMode.ONOFF) + self._attr_supported_color_modes = color_modes - def __init__(self, *args, **kwargs): - """Initialize the light.""" - self._zha_device: ZHADevice = None - super().__init__(*args, **kwargs) - self._attr_min_mireds: int | None = 153 - self._attr_max_mireds: int | None = 500 - self._attr_color_mode = ColorMode.UNKNOWN # Set by subclasses - self._attr_supported_features: int = 0 - self._attr_state: bool | None - self._off_with_transition: bool = False - self._off_brightness: int | None = None - self._zha_config_transition = self._DEFAULT_MIN_TRANSITION_TIME - self._zha_config_enhanced_light_transition: bool = False - self._zha_config_enable_light_transitioning_flag: bool = True - self._zha_config_always_prefer_xy_color_mode: bool = True - self._on_off_cluster_handler = None - self._level_cluster_handler = None - self._color_cluster_handler = None - self._identify_cluster_handler = None - self._transitioning_individual: bool = False - self._transitioning_group: bool = False - self._transition_listener: Callable[[], None] | None = 
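The color-mode handling in the new Light.__init__ boils down to: map every "real" color mode through ZHA_TO_HA_COLOR_MODE, and fall back to BRIGHTNESS or ONOFF when nothing else is reported. A self-contained sketch of that rule, using small stand-in enums rather than the real zha and Home Assistant ColorMode enums:

from enum import StrEnum

class ZhaColorMode(StrEnum):
    """Stand-in for zha.application.platforms.light.const.ColorMode."""
    ONOFF = "onoff"
    BRIGHTNESS = "brightness"
    COLOR_TEMP = "color_temp"
    XY = "xy"

class ColorMode(StrEnum):
    """Stand-in for homeassistant.components.light.ColorMode."""
    ONOFF = "onoff"
    BRIGHTNESS = "brightness"
    COLOR_TEMP = "color_temp"
    XY = "xy"

ZHA_TO_HA_COLOR_MODE = {
    ZhaColorMode.ONOFF: ColorMode.ONOFF,
    ZhaColorMode.BRIGHTNESS: ColorMode.BRIGHTNESS,
    ZhaColorMode.COLOR_TEMP: ColorMode.COLOR_TEMP,
    ZhaColorMode.XY: ColorMode.XY,
}
# The reverse table is derived exactly as in the patch.
HA_TO_ZHA_COLOR_MODE = {v: k for k, v in ZHA_TO_HA_COLOR_MODE.items()}

def derive_supported_color_modes(zha_modes: set[ZhaColorMode]) -> set[ColorMode]:
    # Same fallback rules as Light.__init__: real color modes win, otherwise
    # BRIGHTNESS if the library reported it, otherwise bare ONOFF.
    color_modes = {
        ZHA_TO_HA_COLOR_MODE[mode]
        for mode in zha_modes
        if mode not in (ZhaColorMode.BRIGHTNESS, ZhaColorMode.ONOFF)
    }
    if color_modes:
        return color_modes
    if ZhaColorMode.BRIGHTNESS in zha_modes:
        return {ColorMode.BRIGHTNESS}
    return {ColorMode.ONOFF}

assert derive_supported_color_modes({ZhaColorMode.ONOFF, ZhaColorMode.XY}) == {ColorMode.XY}
assert derive_supported_color_modes({ZhaColorMode.ONOFF, ZhaColorMode.BRIGHTNESS}) == {ColorMode.BRIGHTNESS}
assert derive_supported_color_modes({ZhaColorMode.ONOFF}) == {ColorMode.ONOFF}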
None + features = LightEntityFeature(0) + zha_features: ZhaLightEntityFeature = self.entity_data.entity.supported_features - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - self._async_unsub_transition_listener() - await super().async_will_remove_from_hass() + if ZhaLightEntityFeature.EFFECT in zha_features: + features |= LightEntityFeature.EFFECT + if ZhaLightEntityFeature.FLASH in zha_features: + features |= LightEntityFeature.FLASH + if ZhaLightEntityFeature.TRANSITION in zha_features: + features |= LightEntityFeature.TRANSITION + + self._attr_supported_features = features @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return state attributes.""" + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + state = self.entity_data.entity.state return { - "off_with_transition": self._off_with_transition, - "off_brightness": self._off_brightness, + "off_with_transition": state.get("off_with_transition"), + "off_brightness": state.get("off_brightness"), } @property def is_on(self) -> bool: """Return true if entity is on.""" - if self._attr_state is None: - return False - return self._attr_state - - @callback - def set_level(self, value: int) -> None: - """Set the brightness of this light between 0..254. - - brightness level 255 is a special value instructing the device to come - on at `on_level` Zigbee attribute value, regardless of the last set - level - """ - if self.is_transitioning: - self.debug( - "received level %s while transitioning - skipping update", - value, - ) - return - value = max(0, min(254, value)) - self._attr_brightness = value - self.async_write_ha_state() - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - transition = kwargs.get(light.ATTR_TRANSITION) - duration = ( - transition if transition is not None else self._zha_config_transition - ) or ( - # if 0 is passed in some devices still need the minimum default - self._DEFAULT_MIN_TRANSITION_TIME - ) - brightness = kwargs.get(light.ATTR_BRIGHTNESS) - effect = kwargs.get(light.ATTR_EFFECT) - flash = kwargs.get(light.ATTR_FLASH) - temperature = kwargs.get(light.ATTR_COLOR_TEMP) - xy_color = kwargs.get(light.ATTR_XY_COLOR) - hs_color = kwargs.get(light.ATTR_HS_COLOR) - - execute_if_off_supported = ( - self._GROUP_SUPPORTS_EXECUTE_IF_OFF - if isinstance(self, LightGroup) - else self._color_cluster_handler - and self._color_cluster_handler.execute_if_off_supported - ) - - set_transition_flag = ( - brightness_supported(self._attr_supported_color_modes) - or temperature is not None - or xy_color is not None - or hs_color is not None - ) and self._zha_config_enable_light_transitioning_flag - transition_time = ( - ( - duration + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - if ( - (brightness is not None or transition is not None) - and brightness_supported(self._attr_supported_color_modes) - or (self._off_with_transition and self._off_brightness is not None) - or temperature is not None - or xy_color is not None - or hs_color is not None - ) - else DEFAULT_ON_OFF_TRANSITION + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - ) - if set_transition_flag - else 0 - ) - - # If we need to pause attribute report parsing, we'll do so here. - # After successful calls, we later start a timer to unset the flag after - # transition_time. - # - On an error on the first move to level call, we unset the flag immediately - # if no previous timer is running. 
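The feature translation just above follows the same pattern as the color-mode mapping: because the zha library's flags and Home Assistant's LightEntityFeature are separate enum types, each supported flag is tested and re-set explicitly rather than cast. A sketch with stand-in flag values:

from enum import IntFlag

class ZhaLightEntityFeature(IntFlag):
    """Stand-in for the zha library's LightEntityFeature flags (values illustrative)."""
    EFFECT = 1
    FLASH = 2
    TRANSITION = 4

class LightEntityFeature(IntFlag):
    """Stand-in for homeassistant.components.light.LightEntityFeature (values illustrative)."""
    EFFECT = 4
    FLASH = 8
    TRANSITION = 32

def translate_features(zha_features: ZhaLightEntityFeature) -> LightEntityFeature:
    # Same membership checks as Light.__init__: each flag is tested on the zha
    # enum and set on the Home Assistant enum.
    features = LightEntityFeature(0)
    if ZhaLightEntityFeature.EFFECT in zha_features:
        features |= LightEntityFeature.EFFECT
    if ZhaLightEntityFeature.FLASH in zha_features:
        features |= LightEntityFeature.FLASH
    if ZhaLightEntityFeature.TRANSITION in zha_features:
        features |= LightEntityFeature.TRANSITION
    return features

assert translate_features(
    ZhaLightEntityFeature.EFFECT | ZhaLightEntityFeature.TRANSITION
) == (LightEntityFeature.EFFECT | LightEntityFeature.TRANSITION)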
- # - On an error on subsequent calls, we start the transition timer, - # as a brightness call might have come through. - if set_transition_flag: - self.async_transition_set_flag() - - # If the light is currently off but a turn_on call with a color/temperature is - # sent, the light needs to be turned on first at a low brightness level where - # the light is immediately transitioned to the correct color. Afterwards, the - # transition is only from the low brightness to the new brightness. - # Otherwise, the transition is from the color the light had before being turned - # on to the new color. This can look especially bad with transitions longer than - # a second. We do not want to do this for devices that need to be forced to use - # the on command because we would end up with 4 commands sent: - # move to level, on, color, move to level... We also will not set this - # if the bulb is already in the desired color mode with the desired color - # or color temperature. - new_color_provided_while_off = ( - self._zha_config_enhanced_light_transition - and not self._FORCE_ON - and not self._attr_state - and ( - ( - temperature is not None - and ( - self._attr_color_temp != temperature - or self._attr_color_mode != ColorMode.COLOR_TEMP - ) - ) - or ( - xy_color is not None - and ( - self._attr_xy_color != xy_color - or self._attr_color_mode != ColorMode.XY - ) - ) - or ( - hs_color is not None - and ( - self._attr_hs_color != hs_color - or self._attr_color_mode != ColorMode.HS - ) - ) - ) - and brightness_supported(self._attr_supported_color_modes) - and not execute_if_off_supported - ) - - if ( - brightness is None - and (self._off_with_transition or new_color_provided_while_off) - and self._off_brightness is not None - ): - brightness = self._off_brightness - - if brightness is not None: - level = min(254, brightness) - else: - level = self._attr_brightness or 254 - - t_log = {} - - if new_color_provided_while_off: - # If the light is currently off, we first need to turn it on at a low - # brightness level with no transition. - # After that, we set it to the desired color/temperature with no transition. 
- result = await self._level_cluster_handler.move_to_level_with_on_off( - level=DEFAULT_MIN_BRIGHTNESS, - transition_time=int(10 * self._DEFAULT_MIN_TRANSITION_TIME), - ) - t_log["move_to_level_with_on_off"] = result - if result[1] is not Status.SUCCESS: - # First 'move to level' call failed, so if the transitioning delay - # isn't running from a previous call, - # the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - # Currently only setting it to "on", as the correct level state will - # be set at the second move_to_level call - self._attr_state = True - - if execute_if_off_supported: - self.debug("handling color commands before turning on/level") - if not await self.async_handle_color_commands( - temperature, - duration, # duration is ignored by lights when off - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - # Color calls before on/level calls failed, - # so if the transitioning delay isn't running from a previous call, - # the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - - if ( - (brightness is not None or transition is not None) - and not new_color_provided_while_off - and brightness_supported(self._attr_supported_color_modes) - ): - result = await self._level_cluster_handler.move_to_level_with_on_off( - level=level, - transition_time=int(10 * duration), - ) - t_log["move_to_level_with_on_off"] = result - if result[1] is not Status.SUCCESS: - # First 'move to level' call failed, so if the transitioning delay - # isn't running from a previous call, the flag can be unset immediately - if set_transition_flag and not self._transition_listener: - self.async_transition_complete() - self.debug("turned on: %s", t_log) - return - self._attr_state = bool(level) - if level: - self._attr_brightness = level - - if ( - (brightness is None and transition is None) - and not new_color_provided_while_off - or (self._FORCE_ON and brightness != 0) - ): - # since FORCE_ON lights don't turn on with move_to_level_with_on_off, - # we should call the on command on the on_off cluster - # if brightness is not 0. - result = await self._on_off_cluster_handler.on() - t_log["on_off"] = result - if result[1] is not Status.SUCCESS: - # 'On' call failed, but as brightness may still transition - # (for FORCE_ON lights), we start the timer to unset the flag after - # the transition_time if necessary. - self.async_transition_start_timer(transition_time) - self.debug("turned on: %s", t_log) - return - self._attr_state = True - - if not execute_if_off_supported: - self.debug("handling color commands after turning on/level") - if not await self.async_handle_color_commands( - temperature, - duration, - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - # Color calls failed, but as brightness may still transition, - # we start the timer to unset the flag - self.async_transition_start_timer(transition_time) - self.debug("turned on: %s", t_log) - return - - if new_color_provided_while_off: - # The light has the correct color, so we can now transition - # it to the correct brightness level. 
- result = await self._level_cluster_handler.move_to_level( - level=level, transition_time=int(10 * duration) - ) - t_log["move_to_level_if_color"] = result - if result[1] is not Status.SUCCESS: - self.debug("turned on: %s", t_log) - return - self._attr_state = bool(level) - if level: - self._attr_brightness = level - - # Our light is guaranteed to have just started the transitioning process - # if necessary, so we start the delay for the transition (to stop parsing - # attribute reports after the completed transition). - self.async_transition_start_timer(transition_time) - - if effect == light.EFFECT_COLORLOOP: - result = await self._color_cluster_handler.color_loop_set( - update_flags=( - Color.ColorLoopUpdateFlags.Action - | Color.ColorLoopUpdateFlags.Direction - | Color.ColorLoopUpdateFlags.Time - ), - action=Color.ColorLoopAction.Activate_from_current_hue, - direction=Color.ColorLoopDirection.Increment, - time=transition if transition else 7, - start_hue=0, - ) - t_log["color_loop_set"] = result - self._attr_effect = light.EFFECT_COLORLOOP - elif ( - self._attr_effect == light.EFFECT_COLORLOOP - and effect != light.EFFECT_COLORLOOP - ): - result = await self._color_cluster_handler.color_loop_set( - update_flags=Color.ColorLoopUpdateFlags.Action, - action=Color.ColorLoopAction.Deactivate, - direction=Color.ColorLoopDirection.Decrement, - time=0, - start_hue=0, - ) - t_log["color_loop_set"] = result - self._attr_effect = None - - if flash is not None: - result = await self._identify_cluster_handler.trigger_effect( - effect_id=FLASH_EFFECTS[flash], - effect_variant=Identify.EffectVariant.Default, - ) - t_log["trigger_effect"] = result - - self._off_with_transition = False - self._off_brightness = None - self.debug("turned on: %s", t_log) - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - transition = kwargs.get(light.ATTR_TRANSITION) - supports_level = brightness_supported(self._attr_supported_color_modes) - - transition_time = ( - transition or self._DEFAULT_MIN_TRANSITION_TIME - if transition is not None - else DEFAULT_ON_OFF_TRANSITION - ) + DEFAULT_EXTRA_TRANSITION_DELAY_SHORT - - # Start pausing attribute report parsing - if self._zha_config_enable_light_transitioning_flag: - self.async_transition_set_flag() - - # is not none looks odd here, but it will override built in bulb - # transition times if we pass 0 in here - if transition is not None and supports_level: - result = await self._level_cluster_handler.move_to_level_with_on_off( - level=0, - transition_time=int( - 10 * (transition or self._DEFAULT_MIN_TRANSITION_TIME) - ), - ) - else: - result = await self._on_off_cluster_handler.off() - - # Pause parsing attribute reports until transition is complete - if self._zha_config_enable_light_transitioning_flag: - self.async_transition_start_timer(transition_time) - self.debug("turned off: %s", result) - if result[1] is not Status.SUCCESS: - return - self._attr_state = False - - if supports_level and not self._off_with_transition: - # store current brightness so that the next turn_on uses it: - # when using "enhanced turn on" - self._off_brightness = self._attr_brightness - if transition is not None: - # save for when calling turn_on without a brightness: - # current_level is set to 1 after transitioning to level 0, - # needed for correct state with light groups - self._attr_brightness = 1 - self._off_with_transition = transition is not None - - self.async_write_ha_state() - - async def async_handle_color_commands( 
- self, - temperature, - duration, - hs_color, - xy_color, - new_color_provided_while_off, - t_log, - ): - """Process ZCL color commands.""" - - transition_time = ( - self._DEFAULT_MIN_TRANSITION_TIME - if new_color_provided_while_off - else duration - ) - - if temperature is not None: - result = await self._color_cluster_handler.move_to_color_temp( - color_temp_mireds=temperature, - transition_time=int(10 * transition_time), - ) - t_log["move_to_color_temp"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.COLOR_TEMP - self._attr_color_temp = temperature - self._attr_xy_color = None - self._attr_hs_color = None - - if hs_color is not None: - if ( - not isinstance(self, LightGroup) - and self._color_cluster_handler.enhanced_hue_supported - ): - result = await self._color_cluster_handler.enhanced_move_to_hue_and_saturation( - enhanced_hue=int(hs_color[0] * 65535 / 360), - saturation=int(hs_color[1] * 2.54), - transition_time=int(10 * transition_time), - ) - t_log["enhanced_move_to_hue_and_saturation"] = result - else: - result = await self._color_cluster_handler.move_to_hue_and_saturation( - hue=int(hs_color[0] * 254 / 360), - saturation=int(hs_color[1] * 2.54), - transition_time=int(10 * transition_time), - ) - t_log["move_to_hue_and_saturation"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.HS - self._attr_hs_color = hs_color - self._attr_xy_color = None - self._attr_color_temp = None - xy_color = None # don't set xy_color if it is also present - - if xy_color is not None: - result = await self._color_cluster_handler.move_to_color( - color_x=int(xy_color[0] * 65535), - color_y=int(xy_color[1] * 65535), - transition_time=int(10 * transition_time), - ) - t_log["move_to_color"] = result - if result[1] is not Status.SUCCESS: - return False - self._attr_color_mode = ColorMode.XY - self._attr_xy_color = xy_color - self._attr_color_temp = None - self._attr_hs_color = None - - return True + return self.entity_data.entity.is_on @property - def is_transitioning(self) -> bool: - """Return if the light is transitioning.""" - return self._transitioning_individual or self._transitioning_group + def brightness(self) -> int: + """Return the brightness of this light.""" + return self.entity_data.entity.brightness - @callback - def async_transition_set_flag(self) -> None: - """Set _transitioning to True.""" - self.debug("setting transitioning flag to True") - self._transitioning_individual = True - self._transitioning_group = False - if isinstance(self, LightGroup): - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_TRANSITION_START, - {"entity_ids": self._entity_ids}, - ) - self._async_unsub_transition_listener() - - @callback - def async_transition_start_timer(self, transition_time) -> None: - """Start a timer to unset _transitioning_individual after transition_time. - - If necessary. 
- """ - if not transition_time: - return - # For longer transitions, we want to extend the timer a bit more - if transition_time >= DEFAULT_LONG_TRANSITION_TIME: - transition_time += DEFAULT_EXTRA_TRANSITION_DELAY_LONG - self.debug("starting transitioning timer for %s", transition_time) - self._transition_listener = async_call_later( - self._zha_device.hass, - transition_time, - self.async_transition_complete, - ) - - @callback - def _async_unsub_transition_listener(self) -> None: - """Unsubscribe transition listener.""" - if self._transition_listener: - self._transition_listener() - self._transition_listener = None - - @callback - def async_transition_complete(self, _=None) -> None: - """Set _transitioning_individual to False and write HA state.""" - self.debug("transition complete - future attribute reports will write HA state") - self._transitioning_individual = False - self._async_unsub_transition_listener() - self.async_write_ha_state() - if isinstance(self, LightGroup): - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, - {"entity_ids": self._entity_ids}, - ) - if self._debounced_member_refresh is not None: - self.debug("transition complete - refreshing group member states") - assert self.platform.config_entry - self.platform.config_entry.async_create_background_task( - self.hass, - self._debounced_member_refresh.async_call(), - "zha.light-refresh-debounced-member", - ) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, -) -class Light(BaseLight, ZhaEntity): - """Representation of a ZHA or ZLL light.""" - - _attr_supported_color_modes: set[ColorMode] - _attr_translation_key: str = "light" - _REFRESH_INTERVAL = (45, 75) - - def __init__( - self, unique_id, zha_device: ZHADevice, cluster_handlers, **kwargs - ) -> None: - """Initialize the ZHA light.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - self._attr_state = bool(self._on_off_cluster_handler.on_off) - self._level_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_LEVEL) - self._color_cluster_handler = self.cluster_handlers.get(CLUSTER_HANDLER_COLOR) - self._identify_cluster_handler = zha_device.identify_ch - if self._color_cluster_handler: - self._attr_min_mireds: int = self._color_cluster_handler.min_mireds - self._attr_max_mireds: int = self._color_cluster_handler.max_mireds - self._cancel_refresh_handle: CALLBACK_TYPE | None = None - effect_list = [] - - self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - True, - ) - - self._attr_supported_color_modes = {ColorMode.ONOFF} - if self._level_cluster_handler: - self._attr_supported_color_modes.add(ColorMode.BRIGHTNESS) - self._attr_supported_features |= light.LightEntityFeature.TRANSITION - self._attr_brightness = self._level_cluster_handler.current_level - - if self._color_cluster_handler: - if self._color_cluster_handler.color_temp_supported: - self._attr_supported_color_modes.add(ColorMode.COLOR_TEMP) - self._attr_color_temp = self._color_cluster_handler.color_temperature - - if self._color_cluster_handler.xy_supported and ( - self._zha_config_always_prefer_xy_color_mode - or not self._color_cluster_handler.hs_supported - ): - self._attr_supported_color_modes.add(ColorMode.XY) - curr_x = self._color_cluster_handler.current_x - curr_y = 
self._color_cluster_handler.current_y - if curr_x is not None and curr_y is not None: - self._attr_xy_color = (curr_x / 65535, curr_y / 65535) - else: - self._attr_xy_color = (0, 0) - - if ( - self._color_cluster_handler.hs_supported - and not self._zha_config_always_prefer_xy_color_mode - ): - self._attr_supported_color_modes.add(ColorMode.HS) - if ( - self._color_cluster_handler.enhanced_hue_supported - and self._color_cluster_handler.enhanced_current_hue is not None - ): - curr_hue = ( - self._color_cluster_handler.enhanced_current_hue * 65535 / 360 - ) - elif self._color_cluster_handler.current_hue is not None: - curr_hue = self._color_cluster_handler.current_hue * 254 / 360 - else: - curr_hue = 0 - - if ( - curr_saturation := self._color_cluster_handler.current_saturation - ) is None: - curr_saturation = 0 - - self._attr_hs_color = ( - int(curr_hue), - int(curr_saturation * 2.54), - ) - - if self._color_cluster_handler.color_loop_supported: - self._attr_supported_features |= light.LightEntityFeature.EFFECT - effect_list.append(light.EFFECT_COLORLOOP) - if self._color_cluster_handler.color_loop_active == 1: - self._attr_effect = light.EFFECT_COLORLOOP - self._attr_supported_color_modes = filter_supported_color_modes( - self._attr_supported_color_modes - ) - if len(self._attr_supported_color_modes) == 1: - self._attr_color_mode = next(iter(self._attr_supported_color_modes)) - else: # Light supports color_temp + hs, determine which mode the light is in - assert self._color_cluster_handler - if ( - self._color_cluster_handler.color_mode - == Color.ColorMode.Color_temperature - ): - self._attr_color_mode = ColorMode.COLOR_TEMP - else: - self._attr_color_mode = ColorMode.XY - - if self._identify_cluster_handler: - self._attr_supported_features |= light.LightEntityFeature.FLASH - - if effect_list: - self._attr_effect_list = effect_list - - self._zha_config_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_DEFAULT_LIGHT_TRANSITION, - 0, - ) - self._zha_config_enhanced_light_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_ENHANCED_LIGHT_TRANSITION, - False, - ) - self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - True, - ) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Set the state.""" - if self.is_transitioning: - self.debug( - "received onoff %s while transitioning - skipping update", - value, - ) - return - self._attr_state = bool(value) - if value: - self._off_with_transition = False - self._off_brightness = None - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - if self._level_cluster_handler: - self.async_accept_signal( - self._level_cluster_handler, SIGNAL_SET_LEVEL, self.set_level - ) - refresh_interval = random.randint(*(x * 60 for x in self._REFRESH_INTERVAL)) - self._cancel_refresh_handle = async_track_time_interval( - self.hass, self._refresh, timedelta(seconds=refresh_interval) - ) - self.debug("started polling with refresh interval of %s", refresh_interval) - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_STATE_CHANGED, - self._maybe_force_refresh, - signal_override=True, - ) - - @callback - def 
transition_on(signal): - """Handle a transition start event from a group.""" - if self.entity_id in signal["entity_ids"]: - self.debug( - "group transition started - setting member transitioning flag" - ) - self._transitioning_group = True - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_TRANSITION_START, - transition_on, - signal_override=True, - ) - - @callback - def transition_off(signal): - """Handle a transition finished event from a group.""" - if self.entity_id in signal["entity_ids"]: - self.debug( - "group transition completed - unsetting member transitioning flag" - ) - self._transitioning_group = False - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_TRANSITION_FINISHED, - transition_off, - signal_override=True, - ) - - self.async_accept_signal( - None, - SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, - self._assume_group_state, - signal_override=True, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - assert self._cancel_refresh_handle - self._cancel_refresh_handle() - self._cancel_refresh_handle = None - self.debug("stopped polling during device removal") - await super().async_will_remove_from_hass() - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._attr_state = last_state.state == STATE_ON - if "brightness" in last_state.attributes: - self._attr_brightness = last_state.attributes["brightness"] - if "off_with_transition" in last_state.attributes: - self._off_with_transition = last_state.attributes["off_with_transition"] - if "off_brightness" in last_state.attributes: - self._off_brightness = last_state.attributes["off_brightness"] - if (color_mode := last_state.attributes.get("color_mode")) is not None: - self._attr_color_mode = ColorMode(color_mode) - if "color_temp" in last_state.attributes: - self._attr_color_temp = last_state.attributes["color_temp"] - if "xy_color" in last_state.attributes: - self._attr_xy_color = last_state.attributes["xy_color"] - if "hs_color" in last_state.attributes: - self._attr_hs_color = last_state.attributes["hs_color"] - if "effect" in last_state.attributes: - self._attr_effect = last_state.attributes["effect"] - - async def async_get_state(self) -> None: - """Attempt to retrieve the state from the light.""" - if not self._attr_available: - return - self.debug("polling current state") - - if self._on_off_cluster_handler: - state = await self._on_off_cluster_handler.get_attribute_value( - "on_off", from_cache=False - ) - # check if transition started whilst waiting for polled state - if self.is_transitioning: - return - - if state is not None: - self._attr_state = state - if state: # reset "off with transition" flag if the light is on - self._off_with_transition = False - self._off_brightness = None - - if self._level_cluster_handler: - level = await self._level_cluster_handler.get_attribute_value( - "current_level", from_cache=False - ) - # check if transition started whilst waiting for polled state - if self.is_transitioning: - return - if level is not None: - self._attr_brightness = level - - if self._color_cluster_handler: - attributes = [ - "color_mode", - "current_x", - "current_y", - ] - if ( - not self._zha_config_always_prefer_xy_color_mode - and self._color_cluster_handler.enhanced_hue_supported - ): - attributes.append("enhanced_current_hue") - attributes.append("current_saturation") - if ( - self._color_cluster_handler.hs_supported - and not self._color_cluster_handler.enhanced_hue_supported - and not 
self._zha_config_always_prefer_xy_color_mode - ): - attributes.append("current_hue") - attributes.append("current_saturation") - if self._color_cluster_handler.color_temp_supported: - attributes.append("color_temperature") - if self._color_cluster_handler.color_loop_supported: - attributes.append("color_loop_active") - - results = await self._color_cluster_handler.get_attributes( - attributes, from_cache=False, only_cache=False - ) - - # although rare, a transition might have been started while we were waiting - # for the polled attributes, so abort if we are transitioning, - # as that state will not be accurate - if self.is_transitioning: - return - - if (color_mode := results.get("color_mode")) is not None: - if color_mode == Color.ColorMode.Color_temperature: - self._attr_color_mode = ColorMode.COLOR_TEMP - color_temp = results.get("color_temperature") - if color_temp is not None and color_mode: - self._attr_color_temp = color_temp - self._attr_xy_color = None - self._attr_hs_color = None - elif ( - color_mode == Color.ColorMode.Hue_and_saturation - and not self._zha_config_always_prefer_xy_color_mode - ): - self._attr_color_mode = ColorMode.HS - if self._color_cluster_handler.enhanced_hue_supported: - current_hue = results.get("enhanced_current_hue") - else: - current_hue = results.get("current_hue") - current_saturation = results.get("current_saturation") - if current_hue is not None and current_saturation is not None: - self._attr_hs_color = ( - int(current_hue * 360 / 65535) - if self._color_cluster_handler.enhanced_hue_supported - else int(current_hue * 360 / 254), - int(current_saturation / 2.54), - ) - self._attr_xy_color = None - self._attr_color_temp = None - else: - self._attr_color_mode = ColorMode.XY - color_x = results.get("current_x") - color_y = results.get("current_y") - if color_x is not None and color_y is not None: - self._attr_xy_color = (color_x / 65535, color_y / 65535) - self._attr_color_temp = None - self._attr_hs_color = None - - color_loop_active = results.get("color_loop_active") - if color_loop_active is not None: - if color_loop_active == 1: - self._attr_effect = light.EFFECT_COLORLOOP - else: - self._attr_effect = None - - async def async_update(self) -> None: - """Update to the latest state.""" - if self.is_transitioning: - self.debug("skipping async_update while transitioning") - return - await self.async_get_state() - - async def _refresh(self, time): - """Call async_get_state at an interval.""" - if self.is_transitioning: - self.debug("skipping _refresh while transitioning") - return - if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: - self.debug("polling for updated state") - await self.async_get_state() - self.async_write_ha_state() - else: - self.debug( - "skipping polling for updated state, available: %s, allow polled requests: %s", - self._zha_device.available, - self.hass.data[DATA_ZHA].allow_polling, - ) - - async def _maybe_force_refresh(self, signal): - """Force update the state if the signal contains the entity id for this entity.""" - if self.entity_id in signal["entity_ids"]: - if self.is_transitioning: - self.debug("skipping _maybe_force_refresh while transitioning") - return - if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: - self.debug("forcing polling for updated state") - await self.async_get_state() - self.async_write_ha_state() - else: - self.debug( - "skipping _maybe_force_refresh, available: %s, allow polled requests: %s", - self._zha_device.available, - 
self.hass.data[DATA_ZHA].allow_polling, - ) - - @callback - def _assume_group_state(self, signal, update_params) -> None: - """Handle an assume group state event from a group.""" - if self.entity_id in signal["entity_ids"] and self._attr_available: - self.debug("member assuming group state with: %s", update_params) - - state = update_params["state"] - brightness = update_params.get(light.ATTR_BRIGHTNESS) - color_mode = update_params.get(light.ATTR_COLOR_MODE) - color_temp = update_params.get(light.ATTR_COLOR_TEMP) - xy_color = update_params.get(light.ATTR_XY_COLOR) - hs_color = update_params.get(light.ATTR_HS_COLOR) - effect = update_params.get(light.ATTR_EFFECT) - - supported_modes = self._attr_supported_color_modes - - # unset "off brightness" and "off with transition" - # if group turned on this light - if state and not self._attr_state: - self._off_with_transition = False - self._off_brightness = None - - # set "off brightness" and "off with transition" - # if group turned off this light, and the light was not already off - # (to not override _off_with_transition) - elif ( - not state and self._attr_state and brightness_supported(supported_modes) - ): - # use individual brightness, instead of possibly averaged - # brightness from group - self._off_brightness = self._attr_brightness - self._off_with_transition = update_params["off_with_transition"] - - # Note: If individual lights have off_with_transition set, but not the - # group, and the group is then turned on without a level, individual lights - # might fall back to brightness level 1. - # Since all lights might need different brightness levels to be turned on, - # we can't use one group call. And making individual calls when turning on - # a ZHA group would cause a lot of traffic. In this case, - # turn_on should either just be called with a level or individual turn_on - # calls can be used. 
- - # state is always set (light.turn_on/light.turn_off) - self._attr_state = state - - # before assuming a group state attribute, check if the attribute - # was actually set in that call - if brightness is not None and brightness_supported(supported_modes): - self._attr_brightness = brightness - if color_mode is not None and color_mode in supported_modes: - self._attr_color_mode = color_mode - if color_temp is not None and ColorMode.COLOR_TEMP in supported_modes: - self._attr_color_temp = color_temp - if xy_color is not None and ColorMode.XY in supported_modes: - self._attr_xy_color = xy_color - if hs_color is not None and ColorMode.HS in supported_modes: - self._attr_hs_color = hs_color - # the effect is always deactivated in async_turn_on if not provided - if effect is None: - self._attr_effect = None - elif self._attr_effect_list and effect in self._attr_effect_list: - self._attr_effect = effect - - self.async_write_ha_state() - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers={"Philips", "Signify Netherlands B.V."}, -) -class HueLight(Light): - """Representation of a HUE light which does not report attributes.""" - - _REFRESH_INTERVAL = (3, 5) - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers={"Jasco", "Jasco Products", "Quotra-Vision", "eWeLight", "eWeLink"}, -) -class ForceOnLight(Light): - """Representation of a light which does not respect on/off for move_to_level_with_on_off commands.""" - - _FORCE_ON = True - - -@STRICT_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, - aux_cluster_handlers={CLUSTER_HANDLER_COLOR, CLUSTER_HANDLER_LEVEL}, - manufacturers=DEFAULT_MIN_TRANSITION_MANUFACTURERS, -) -class MinTransitionLight(Light): - """Representation of a light which does not react to any "move to" calls with 0 as a transition.""" - - # Transitions are counted in 1/10th of a second increments, so this is the smallest - _DEFAULT_MIN_TRANSITION_TIME = 0.1 - - -@GROUP_MATCH() -class LightGroup(BaseLight, ZhaGroupEntity): - """Representation of a light group.""" - - _attr_translation_key: str = "light_group" - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a light group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - group = self.zha_device.gateway.get_group(self._group_id) - - self._GROUP_SUPPORTS_EXECUTE_IF_OFF = True - - for member in group.members: - # Ensure we do not send group commands that violate the minimum transition - # time of any members. - if member.device.manufacturer in DEFAULT_MIN_TRANSITION_MANUFACTURERS: - self._DEFAULT_MIN_TRANSITION_TIME = ( - MinTransitionLight._DEFAULT_MIN_TRANSITION_TIME # noqa: SLF001 - ) - - # Check all group members to see if they support execute_if_off. - # If at least one member has a color cluster and doesn't support it, - # it's not used. 
- for endpoint in member.device._endpoints.values(): # noqa: SLF001 - for cluster_handler in endpoint.all_cluster_handlers.values(): - if ( - cluster_handler.name == CLUSTER_HANDLER_COLOR - and not cluster_handler.execute_if_off_supported - ): - self._GROUP_SUPPORTS_EXECUTE_IF_OFF = False - break - - self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] - self._level_cluster_handler = group.endpoint[LevelControl.cluster_id] - self._color_cluster_handler = group.endpoint[Color.cluster_id] - self._identify_cluster_handler = group.endpoint[Identify.cluster_id] - self._debounced_member_refresh: Debouncer | None = None - self._zha_config_transition = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_DEFAULT_LIGHT_TRANSITION, - 0, - ) - self._zha_config_enable_light_transitioning_flag = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ENABLE_LIGHT_TRANSITIONING_FLAG, - True, - ) - self._zha_config_always_prefer_xy_color_mode = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - True, - ) - self._zha_config_group_members_assume_state = async_get_zha_config_value( - zha_device.gateway.config_entry, - ZHA_OPTIONS, - CONF_GROUP_MEMBERS_ASSUME_STATE, - True, - ) - if self._zha_config_group_members_assume_state: - self._update_group_from_child_delay = ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY - self._zha_config_enhanced_light_transition = False - - self._attr_color_mode = ColorMode.UNKNOWN - self._attr_supported_color_modes = {ColorMode.ONOFF} - - # remove this when all ZHA platforms and base entities are updated @property - def available(self) -> bool: - """Return entity availability.""" - return self._attr_available + def min_mireds(self) -> int: + """Return the coldest color_temp that this light supports.""" + return self.entity_data.entity.min_mireds - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - if self._debounced_member_refresh is None: - force_refresh_debouncer = Debouncer( - self.hass, - _LOGGER, - cooldown=3, - immediate=True, - function=self._force_member_updates, - ) - self._debounced_member_refresh = force_refresh_debouncer - self.async_on_remove(force_refresh_debouncer.async_cancel) + @property + def max_mireds(self) -> int: + """Return the warmest color_temp that this light supports.""" + return self.entity_data.entity.max_mireds + @property + def hs_color(self) -> tuple[float, float] | None: + """Return the hs color value [int, int].""" + return self.entity_data.entity.hs_color + + @property + def xy_color(self) -> tuple[float, float] | None: + """Return the xy color value [float, float].""" + return self.entity_data.entity.xy_color + + @property + def color_temp(self) -> int | None: + """Return the CT color value in mireds.""" + return self.entity_data.entity.color_temp + + @property + def color_mode(self) -> ColorMode | None: + """Return the color mode.""" + if self.entity_data.entity.color_mode is None: + return None + return ZHA_TO_HA_COLOR_MODE[self.entity_data.entity.color_mode] + + @property + def effect_list(self) -> list[str] | None: + """Return the list of supported effects.""" + return self.entity_data.entity.effect_list + + @property + def effect(self) -> str | None: + """Return the current effect.""" + return self.entity_data.entity.effect + + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - 
# "off with transition" and "off brightness" will get overridden when - # turning on the group, but they are needed for setting the assumed - # member state correctly, so save them here - off_brightness = self._off_brightness if self._off_with_transition else None - await super().async_turn_on(**kwargs) - if self._zha_config_group_members_assume_state: - self._send_member_assume_state_event(True, kwargs, off_brightness) - if self.is_transitioning: # when transitioning, state is refreshed at the end - return - if self._debounced_member_refresh: - await self._debounced_member_refresh.async_call() + await self.entity_data.entity.async_turn_on( + transition=kwargs.get(ATTR_TRANSITION), + brightness=kwargs.get(ATTR_BRIGHTNESS), + effect=kwargs.get(ATTR_EFFECT), + flash=kwargs.get(ATTR_FLASH), + color_temp=kwargs.get(ATTR_COLOR_TEMP), + xy_color=kwargs.get(ATTR_XY_COLOR), + hs_color=kwargs.get(ATTR_HS_COLOR), + ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await super().async_turn_off(**kwargs) - if self._zha_config_group_members_assume_state: - self._send_member_assume_state_event(False, kwargs) - if self.is_transitioning: - return - if self._debounced_member_refresh: - await self._debounced_member_refresh.async_call() - - async def async_update(self) -> None: - """Query all members and determine the light group state.""" - self.debug("updating group state") - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - on_states = [state for state in states if state.state == STATE_ON] - - self._attr_state = len(on_states) > 0 - - # reset "off with transition" flag if any member is on - if self._attr_state: - self._off_with_transition = False - self._off_brightness = None - - self._attr_available = any(state.state != STATE_UNAVAILABLE for state in states) - - self._attr_brightness = helpers.reduce_attribute( - on_states, light.ATTR_BRIGHTNESS + await self.entity_data.entity.async_turn_off( + transition=kwargs.get(ATTR_TRANSITION) ) + self.async_write_ha_state() - self._attr_xy_color = helpers.reduce_attribute( - on_states, light.ATTR_XY_COLOR, reduce=helpers.mean_tuple - ) - - if not self._zha_config_always_prefer_xy_color_mode: - self._attr_hs_color = helpers.reduce_attribute( - on_states, light.ATTR_HS_COLOR, reduce=helpers.mean_tuple - ) - - self._attr_color_temp = helpers.reduce_attribute( - on_states, light.ATTR_COLOR_TEMP - ) - self._attr_min_mireds = helpers.reduce_attribute( - states, light.ATTR_MIN_MIREDS, default=153, reduce=min - ) - self._attr_max_mireds = helpers.reduce_attribute( - states, light.ATTR_MAX_MIREDS, default=500, reduce=max - ) - - self._attr_effect_list = None - all_effect_lists = list( - helpers.find_state_attributes(states, light.ATTR_EFFECT_LIST) - ) - if all_effect_lists: - # Merge all effects from all effect_lists with a union merge. - self._attr_effect_list = list(set().union(*all_effect_lists)) - - self._attr_effect = None - all_effects = list(helpers.find_state_attributes(on_states, light.ATTR_EFFECT)) - if all_effects: - # Report the most common effect. 
- effects_count = Counter(itertools.chain(all_effects)) - self._attr_effect = effects_count.most_common(1)[0][0] - - supported_color_modes = {ColorMode.ONOFF} - all_supported_color_modes: list[set[ColorMode]] = list( - helpers.find_state_attributes(states, light.ATTR_SUPPORTED_COLOR_MODES) - ) - if all_supported_color_modes: - # Merge all color modes. - supported_color_modes = filter_supported_color_modes( - set().union(*all_supported_color_modes) - ) - - self._attr_supported_color_modes = supported_color_modes - - self._attr_color_mode = ColorMode.UNKNOWN - all_color_modes = list( - helpers.find_state_attributes(on_states, light.ATTR_COLOR_MODE) - ) - if all_color_modes: - # Report the most common color mode, select brightness and onoff last - color_mode_count = Counter(itertools.chain(all_color_modes)) - if ColorMode.ONOFF in color_mode_count: - if ColorMode.ONOFF in supported_color_modes: - color_mode_count[ColorMode.ONOFF] = -1 - else: - color_mode_count.pop(ColorMode.ONOFF) - if ColorMode.BRIGHTNESS in color_mode_count: - if ColorMode.BRIGHTNESS in supported_color_modes: - color_mode_count[ColorMode.BRIGHTNESS] = 0 - else: - color_mode_count.pop(ColorMode.BRIGHTNESS) - if color_mode_count: - self._attr_color_mode = color_mode_count.most_common(1)[0][0] - else: - self._attr_color_mode = next(iter(supported_color_modes)) - - if self._attr_color_mode == ColorMode.HS and ( - color_mode_count[ColorMode.HS] != len(self._group.members) - or self._zha_config_always_prefer_xy_color_mode - ): # switch to XY if all members do not support HS - self._attr_color_mode = ColorMode.XY - - self._attr_supported_features = LightEntityFeature(0) - for support in helpers.find_state_attributes(states, ATTR_SUPPORTED_FEATURES): - # Merge supported features by emulating support for every feature - # we find. - self._attr_supported_features |= support - # Bitwise-and the supported features with the GroupedLight's features - # so that we don't break in the future when a new feature is added. 
- self._attr_supported_features &= SUPPORT_GROUP_LIGHT - - async def _force_member_updates(self) -> None: - """Force the update of member entities to ensure the states are correct for bulbs that don't report their state.""" - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_STATE_CHANGED, - {"entity_ids": self._entity_ids}, - ) - - def _send_member_assume_state_event( - self, state, service_kwargs, off_brightness=None - ) -> None: - """Send an assume event to all members of the group.""" - update_params = { - "state": state, - "off_with_transition": self._off_with_transition, - } - - # check if the parameters were actually updated - # in the service call before updating members - if light.ATTR_BRIGHTNESS in service_kwargs: # or off brightness - update_params[light.ATTR_BRIGHTNESS] = self._attr_brightness - elif off_brightness is not None: - # if we turn on the group light with "off brightness", - # pass that to the members - update_params[light.ATTR_BRIGHTNESS] = off_brightness - - if light.ATTR_COLOR_TEMP in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_COLOR_TEMP] = self._attr_color_temp - - if light.ATTR_XY_COLOR in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_XY_COLOR] = self._attr_xy_color - - if light.ATTR_HS_COLOR in service_kwargs: - update_params[light.ATTR_COLOR_MODE] = self._attr_color_mode - update_params[light.ATTR_HS_COLOR] = self._attr_hs_color - - if light.ATTR_EFFECT in service_kwargs: - update_params[light.ATTR_EFFECT] = self._attr_effect - - async_dispatcher_send( - self.hass, - SIGNAL_LIGHT_GROUP_ASSUME_GROUP_STATE, - {"entity_ids": self._entity_ids}, - update_params, + @callback + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + self.entity_data.entity.restore_external_state_attributes( + state=(state.state == STATE_ON), + off_with_transition=state.attributes.get(OFF_WITH_TRANSITION), + off_brightness=state.attributes.get(OFF_BRIGHTNESS), + brightness=state.attributes.get(ATTR_BRIGHTNESS), + color_temp=state.attributes.get(ATTR_COLOR_TEMP), + xy_color=state.attributes.get(ATTR_XY_COLOR), + hs_color=state.attributes.get(ATTR_HS_COLOR), + color_mode=( + HA_TO_ZHA_COLOR_MODE[ColorMode(state.attributes[ATTR_COLOR_MODE])] + if state.attributes.get(ATTR_COLOR_MODE) is not None + else None + ), + effect=state.attributes.get(ATTR_EFFECT), ) diff --git a/homeassistant/components/zha/lock.py b/homeassistant/components/zha/lock.py index fa719075c05..ebac03eb7b8 100644 --- a/homeassistant/components/zha/lock.py +++ b/homeassistant/components/zha/lock.py @@ -4,35 +4,25 @@ import functools from typing import Any import voluptuous as vol -from zigpy.zcl.foundation import Status -from homeassistant.components.lock import STATE_LOCKED, STATE_UNLOCKED, LockEntity +from homeassistant.components.lock import LockEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import ( AddEntitiesCallback, async_get_current_platform, ) -from homeassistant.helpers.typing import StateType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_DOORLOCK, +from .entity import 
ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -# The first state is Zigbee 'Not fully locked' -STATE_LIST = [STATE_UNLOCKED, STATE_LOCKED, STATE_UNLOCKED] -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.LOCK) - -VALUE_TO_STATE = dict(enumerate(STATE_LIST)) SERVICE_SET_LOCK_USER_CODE = "set_lock_user_code" SERVICE_ENABLE_LOCK_USER_CODE = "enable_lock_user_code" @@ -53,7 +43,7 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, ZhaDoorLock, entities_to_create ), ) config_entry.async_on_unload(unsub) @@ -94,105 +84,57 @@ async def async_setup_entry( ) -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DOORLOCK) -class ZhaDoorLock(ZhaEntity, LockEntity): +class ZhaDoorLock(ZHAEntity, LockEntity): """Representation of a ZHA lock.""" _attr_translation_key: str = "door_lock" - def __init__(self, unique_id, zha_device, cluster_handlers, **kwargs): - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._doorlock_cluster_handler = self.cluster_handlers.get( - CLUSTER_HANDLER_DOORLOCK - ) - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._doorlock_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_restore_last_state(self, last_state): - """Restore previous state.""" - self._state = VALUE_TO_STATE.get(last_state.state, last_state.state) - @property def is_locked(self) -> bool: """Return true if entity is locked.""" - if self._state is None: - return False - return self._state == STATE_LOCKED - - @property - def extra_state_attributes(self) -> dict[str, StateType]: - """Return state attributes.""" - return self.state_attributes + return self.entity_data.entity.is_locked + @convert_zha_error_to_ha_error async def async_lock(self, **kwargs: Any) -> None: """Lock the lock.""" - result = await self._doorlock_cluster_handler.lock_door() - if result[0] is not Status.SUCCESS: - self.error("Error with lock_door: %s", result) - return + await self.entity_data.entity.async_lock() self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - result = await self._doorlock_cluster_handler.unlock_door() - if result[0] is not Status.SUCCESS: - self.error("Error with unlock_door: %s", result) - return + await self.entity_data.entity.async_unlock() self.async_write_ha_state() - async def async_update(self) -> None: - """Attempt to retrieve state from the lock.""" - await super().async_update() - await self.async_get_state() - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle state update from cluster handler.""" - self._state = VALUE_TO_STATE.get(value, self._state) - self.async_write_ha_state() - - async def async_get_state(self, from_cache=True): - """Attempt to retrieve state from the lock.""" - if self._doorlock_cluster_handler: - state = await self._doorlock_cluster_handler.get_attribute_value( - "lock_state", from_cache=from_cache - ) - if state is not None: - self._state = VALUE_TO_STATE.get(state, 
self._state) - - async def refresh(self, time): - """Call async_get_state at an interval.""" - await self.async_get_state(from_cache=False) - + @convert_zha_error_to_ha_error async def async_set_lock_user_code(self, code_slot: int, user_code: str) -> None: """Set the user_code to index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_set_user_code( - code_slot, user_code - ) - self.debug("User code at slot %s set", code_slot) + await self.entity_data.entity.async_set_lock_user_code( + code_slot=code_slot, user_code=user_code + ) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_enable_lock_user_code(self, code_slot: int) -> None: """Enable user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_enable_user_code(code_slot) - self.debug("User code at slot %s enabled", code_slot) + await self.entity_data.entity.async_enable_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_disable_lock_user_code(self, code_slot: int) -> None: """Disable user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_disable_user_code(code_slot) - self.debug("User code at slot %s disabled", code_slot) + await self.entity_data.entity.async_disable_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_clear_lock_user_code(self, code_slot: int) -> None: """Clear the user_code at index X on the lock.""" - if self._doorlock_cluster_handler: - await self._doorlock_cluster_handler.async_clear_user_code(code_slot) - self.debug("User code at slot %s cleared", code_slot) + await self.entity_data.entity.async_clear_lock_user_code(code_slot=code_slot) + self.async_write_ha_state() + + @callback + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + self.entity_data.entity.restore_external_state_attributes( + state=state.state, + ) diff --git a/homeassistant/components/zha/logbook.py b/homeassistant/components/zha/logbook.py index e63ef565824..3de81e1255d 100644 --- a/homeassistant/components/zha/logbook.py +++ b/homeassistant/components/zha/logbook.py @@ -5,16 +5,18 @@ from __future__ import annotations from collections.abc import Callable from typing import TYPE_CHECKING +from zha.application.const import ZHA_EVENT + from homeassistant.components.logbook import LOGBOOK_ENTRY_MESSAGE, LOGBOOK_ENTRY_NAME from homeassistant.const import ATTR_COMMAND, ATTR_DEVICE_ID from homeassistant.core import Event, HomeAssistant, callback import homeassistant.helpers.device_registry as dr -from .core.const import DOMAIN as ZHA_DOMAIN, ZHA_EVENT -from .core.helpers import async_get_zha_device +from .const import DOMAIN as ZHA_DOMAIN +from .helpers import async_get_zha_device_proxy if TYPE_CHECKING: - from .core.device import ZHADevice + from zha.zigbee.device import Device @callback @@ -30,7 +32,7 @@ def async_describe_events( """Describe ZHA logbook event.""" device: dr.DeviceEntry | None = None device_name: str = "Unknown device" - zha_device: ZHADevice | None = None + zha_device: Device | None = None event_data = event.data event_type: str | None = None event_subtype: str | None = None @@ -39,7 +41,9 @@ def async_describe_events( device = device_registry.devices[event.data[ATTR_DEVICE_ID]] if device: device_name = device.name_by_user or device.name or "Unknown device" - 
zha_device = async_get_zha_device(hass, event.data[ATTR_DEVICE_ID]) + zha_device = async_get_zha_device_proxy( + hass, event.data[ATTR_DEVICE_ID] + ).device except (KeyError, AttributeError): pass diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 7087ff0b2f0..4a597b0233c 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -18,20 +18,10 @@ "zigpy_xbee", "zigpy_zigate", "zigpy_znp", + "zha", "universal_silabs_flasher" ], - "requirements": [ - "bellows==0.39.1", - "pyserial==3.5", - "zha-quirks==0.0.117", - "zigpy-deconz==0.23.2", - "zigpy==0.64.1", - "zigpy-xbee==0.20.1", - "zigpy-zigate==0.12.1", - "zigpy-znp==0.12.2", - "universal-silabs-flasher==0.0.20", - "pyserial-asyncio-fast==0.11" - ], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/number.py b/homeassistant/components/zha/number.py index 9320b4494a4..263f5262994 100644 --- a/homeassistant/components/zha/number.py +++ b/homeassistant/components/zha/number.py @@ -4,267 +4,25 @@ from __future__ import annotations import functools import logging -from typing import TYPE_CHECKING, Any, Self -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy.quirks.v2 import NumberMetadata -from zigpy.zcl.clusters.hvac import Thermostat - -from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode +from homeassistant.components.number import RestoreNumber from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - EntityCategory, - Platform, - UnitOfMass, - UnitOfTemperature, - UnitOfTime, -) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UndefinedType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ANALOG_OUTPUT, - CLUSTER_HANDLER_BASIC, - CLUSTER_HANDLER_COLOR, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_LEVEL, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class, validate_unit -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice _LOGGER = logging.getLogger(__name__) -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.NUMBER) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.NUMBER -) - - -UNITS = { - 0: "Square-meters", - 1: "Square-feet", - 2: "Milliamperes", - 3: "Amperes", - 4: "Ohms", - 5: "Volts", - 6: "Kilo-volts", - 7: "Mega-volts", - 8: "Volt-amperes", - 9: "Kilo-volt-amperes", - 10: "Mega-volt-amperes", - 11: "Volt-amperes-reactive", - 12: "Kilo-volt-amperes-reactive", - 13: "Mega-volt-amperes-reactive", - 14: "Degrees-phase", - 15: "Power-factor", - 16: "Joules", - 17: "Kilojoules", - 18: "Watt-hours", - 19: "Kilowatt-hours", - 20: "BTUs", - 21: "Therms", - 22: "Ton-hours", - 23: 
"Joules-per-kilogram-dry-air", - 24: "BTUs-per-pound-dry-air", - 25: "Cycles-per-hour", - 26: "Cycles-per-minute", - 27: "Hertz", - 28: "Grams-of-water-per-kilogram-dry-air", - 29: "Percent-relative-humidity", - 30: "Millimeters", - 31: "Meters", - 32: "Inches", - 33: "Feet", - 34: "Watts-per-square-foot", - 35: "Watts-per-square-meter", - 36: "Lumens", - 37: "Luxes", - 38: "Foot-candles", - 39: "Kilograms", - 40: "Pounds-mass", - 41: "Tons", - 42: "Kilograms-per-second", - 43: "Kilograms-per-minute", - 44: "Kilograms-per-hour", - 45: "Pounds-mass-per-minute", - 46: "Pounds-mass-per-hour", - 47: "Watts", - 48: "Kilowatts", - 49: "Megawatts", - 50: "BTUs-per-hour", - 51: "Horsepower", - 52: "Tons-refrigeration", - 53: "Pascals", - 54: "Kilopascals", - 55: "Bars", - 56: "Pounds-force-per-square-inch", - 57: "Centimeters-of-water", - 58: "Inches-of-water", - 59: "Millimeters-of-mercury", - 60: "Centimeters-of-mercury", - 61: "Inches-of-mercury", - 62: "°C", - 63: "°K", - 64: "°F", - 65: "Degree-days-Celsius", - 66: "Degree-days-Fahrenheit", - 67: "Years", - 68: "Months", - 69: "Weeks", - 70: "Days", - 71: "Hours", - 72: "Minutes", - 73: "Seconds", - 74: "Meters-per-second", - 75: "Kilometers-per-hour", - 76: "Feet-per-second", - 77: "Feet-per-minute", - 78: "Miles-per-hour", - 79: "Cubic-feet", - 80: "Cubic-meters", - 81: "Imperial-gallons", - 82: "Liters", - 83: "Us-gallons", - 84: "Cubic-feet-per-minute", - 85: "Cubic-meters-per-second", - 86: "Imperial-gallons-per-minute", - 87: "Liters-per-second", - 88: "Liters-per-minute", - 89: "Us-gallons-per-minute", - 90: "Degrees-angular", - 91: "Degrees-Celsius-per-hour", - 92: "Degrees-Celsius-per-minute", - 93: "Degrees-Fahrenheit-per-hour", - 94: "Degrees-Fahrenheit-per-minute", - 95: None, - 96: "Parts-per-million", - 97: "Parts-per-billion", - 98: "%", - 99: "Percent-per-second", - 100: "Per-minute", - 101: "Per-second", - 102: "Psi-per-Degree-Fahrenheit", - 103: "Radians", - 104: "Revolutions-per-minute", - 105: "Currency1", - 106: "Currency2", - 107: "Currency3", - 108: "Currency4", - 109: "Currency5", - 110: "Currency6", - 111: "Currency7", - 112: "Currency8", - 113: "Currency9", - 114: "Currency10", - 115: "Square-inches", - 116: "Square-centimeters", - 117: "BTUs-per-pound", - 118: "Centimeters", - 119: "Pounds-mass-per-second", - 120: "Delta-Degrees-Fahrenheit", - 121: "Delta-Degrees-Kelvin", - 122: "Kilohms", - 123: "Megohms", - 124: "Millivolts", - 125: "Kilojoules-per-kilogram", - 126: "Megajoules", - 127: "Joules-per-degree-Kelvin", - 128: "Joules-per-kilogram-degree-Kelvin", - 129: "Kilohertz", - 130: "Megahertz", - 131: "Per-hour", - 132: "Milliwatts", - 133: "Hectopascals", - 134: "Millibars", - 135: "Cubic-meters-per-hour", - 136: "Liters-per-hour", - 137: "Kilowatt-hours-per-square-meter", - 138: "Kilowatt-hours-per-square-foot", - 139: "Megajoules-per-square-meter", - 140: "Megajoules-per-square-foot", - 141: "Watts-per-square-meter-Degree-Kelvin", - 142: "Cubic-feet-per-second", - 143: "Percent-obscuration-per-foot", - 144: "Percent-obscuration-per-meter", - 145: "Milliohms", - 146: "Megawatt-hours", - 147: "Kilo-BTUs", - 148: "Mega-BTUs", - 149: "Kilojoules-per-kilogram-dry-air", - 150: "Megajoules-per-kilogram-dry-air", - 151: "Kilojoules-per-degree-Kelvin", - 152: "Megajoules-per-degree-Kelvin", - 153: "Newton", - 154: "Grams-per-second", - 155: "Grams-per-minute", - 156: "Tons-per-hour", - 157: "Kilo-BTUs-per-hour", - 158: "Hundredths-seconds", - 159: "Milliseconds", - 160: "Newton-meters", - 161: 
"Millimeters-per-second", - 162: "Millimeters-per-minute", - 163: "Meters-per-minute", - 164: "Meters-per-hour", - 165: "Cubic-meters-per-minute", - 166: "Meters-per-second-per-second", - 167: "Amperes-per-meter", - 168: "Amperes-per-square-meter", - 169: "Ampere-square-meters", - 170: "Farads", - 171: "Henrys", - 172: "Ohm-meters", - 173: "Siemens", - 174: "Siemens-per-meter", - 175: "Teslas", - 176: "Volts-per-degree-Kelvin", - 177: "Volts-per-meter", - 178: "Webers", - 179: "Candelas", - 180: "Candelas-per-square-meter", - 181: "Kelvins-per-hour", - 182: "Kelvins-per-minute", - 183: "Joule-seconds", - 185: "Square-meters-per-Newton", - 186: "Kilogram-per-cubic-meter", - 187: "Newton-seconds", - 188: "Newtons-per-meter", - 189: "Watts-per-meter-per-degree-Kelvin", -} - -ICONS = { - 0: "mdi:temperature-celsius", - 1: "mdi:water-percent", - 2: "mdi:gauge", - 3: "mdi:speedometer", - 4: "mdi:percent", - 5: "mdi:air-filter", - 6: "mdi:fan", - 7: "mdi:flash", - 8: "mdi:current-ac", - 9: "mdi:flash", - 10: "mdi:flash", - 11: "mdi:flash", - 12: "mdi:counter", - 13: "mdi:thermometer-lines", - 14: "mdi:timer", - 15: "mdi:palette", - 16: "mdi:brightness-percent", -} - async def async_setup_entry( hass: HomeAssistant, @@ -279,875 +37,53 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZhaNumber, entities_to_create ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ANALOG_OUTPUT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZhaNumber(ZhaEntity, NumberEntity): +class ZhaNumber(ZHAEntity, RestoreNumber): """Representation of a ZHA Number entity.""" - _attr_translation_key: str = "number" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this entity.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._analog_output_cluster_handler = self.cluster_handlers[ - CLUSTER_HANDLER_ANALOG_OUTPUT - ] - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._analog_output_cluster_handler, - SIGNAL_ATTR_UPDATED, - self.async_set_state, - ) - - @property - def native_value(self) -> float | None: - """Return the current value.""" - return self._analog_output_cluster_handler.present_value - - @property - def native_min_value(self) -> float: - """Return the minimum value.""" - min_present_value = self._analog_output_cluster_handler.min_present_value - if min_present_value is not None: - return min_present_value - return 0 - - @property - def native_max_value(self) -> float: - """Return the maximum value.""" - max_present_value = self._analog_output_cluster_handler.max_present_value - if max_present_value is not None: - return max_present_value - return 1023 - - @property - def native_step(self) -> float | None: - """Return the value step.""" - resolution = self._analog_output_cluster_handler.resolution - if resolution is not None: - return resolution - return super().native_step - @property def name(self) -> str | UndefinedType | None: """Return the name of the number entity.""" - description = self._analog_output_cluster_handler.description - if description is not None and len(description) > 0: - return f"{super().name} {description}" - return super().name + if 
(description := self.entity_data.entity.description) is None: + return super().name + + # The name of this entity is reported by the device itself. + # For backwards compatibility, we keep the same format as before. This + # should probably be changed in the future to omit the prefix. + return f"{super().name} {description}" @property - def icon(self) -> str | None: - """Return the icon to be used for this entity.""" - application_type = self._analog_output_cluster_handler.application_type - if application_type is not None: - return ICONS.get(application_type >> 16, super().icon) - return super().icon - - @property - def native_unit_of_measurement(self) -> str | None: - """Return the unit the value is expressed in.""" - engineering_units = self._analog_output_cluster_handler.engineering_units - return UNITS.get(engineering_units) - - @callback - def async_set_state(self, attr_id, attr_name, value): - """Handle value update from cluster handler.""" - self.async_write_ha_state() - - async def async_set_native_value(self, value: float) -> None: - """Update the current value from HA.""" - await self._analog_output_cluster_handler.async_set_present_value(float(value)) - self.async_write_ha_state() - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - await super().async_update() - _LOGGER.debug("polling current state") - if self._analog_output_cluster_handler: - value = await self._analog_output_cluster_handler.get_attribute_value( - "present_value", from_cache=False - ) - _LOGGER.debug("read value=%s", value) - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity): - """Representation of a ZHA number configuration entity.""" - - _attr_entity_category = EntityCategory.CONFIG - _attr_native_step: float = 1.0 - _attr_multiplier: float = 1 - _attribute_name: str - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this number configuration entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: NumberMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - - if entity_metadata.min is not None: - self._attr_native_min_value = entity_metadata.min - if entity_metadata.max is not None: - self._attr_native_max_value = entity_metadata.max - if entity_metadata.step is not None: - self._attr_native_step = entity_metadata.step - if entity_metadata.multiplier is not None: - self._attr_multiplier = entity_metadata.multiplier - if entity_metadata.device_class is not None: - self._attr_device_class = validate_device_class( - NumberDeviceClass, - entity_metadata.device_class, - Platform.NUMBER.value, - _LOGGER, - ) - if entity_metadata.device_class is None and entity_metadata.unit is not None: - self._attr_native_unit_of_measurement = validate_unit( - entity_metadata.unit - ).value - - @property - def native_value(self) -> float: + def native_value(self) -> float | None: """Return the current value.""" - return ( - self._cluster_handler.cluster.get(self._attribute_name) - * self._attr_multiplier - ) - - async def async_set_native_value(self, value: float) -> None: - """Update the current value from HA.""" - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: int(value / self._attr_multiplier)} - ) - self.async_write_ha_state() - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - await super().async_update() - _LOGGER.debug("polling current state") - if self._cluster_handler: - value = await self._cluster_handler.get_attribute_value( - self._attribute_name, from_cache=False - ) - _LOGGER.debug("read value=%s", value) - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.motion.ac02", "lumi.motion.agl04"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraMotionDetectionInterval(ZHANumberConfigurationEntity): - """Representation of a ZHA motion detection interval configuration entity.""" - - _unique_id_suffix = "detection_interval" - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 65535 - _attribute_name = "detection_interval" - _attr_translation_key: str = "detection_interval" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OnOffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - 
"""Representation of a ZHA on off transition time configuration entity.""" - - _unique_id_suffix = "on_off_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFF - _attribute_name = "on_off_transition_time" - _attr_translation_key: str = "on_off_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OnLevelConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA on level configuration entity.""" - - _unique_id_suffix = "on_level" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFF - _attribute_name = "on_level" - _attr_translation_key: str = "on_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OnTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA on transition time configuration entity.""" - - _unique_id_suffix = "on_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFE - _attribute_name = "on_transition_time" - _attr_translation_key: str = "on_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class OffTransitionTimeConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA off transition time configuration entity.""" - - _unique_id_suffix = "off_transition_time" - _attr_native_min_value: float = 0x0000 - _attr_native_max_value: float = 0xFFFE - _attribute_name = "off_transition_time" - _attr_translation_key: str = "off_transition_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DefaultMoveRateConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA default move rate configuration entity.""" - - _unique_id_suffix = "default_move_rate" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFE - _attribute_name = "default_move_rate" - _attr_translation_key: str = "default_move_rate" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEVEL) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class StartUpCurrentLevelConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA startup current level configuration entity.""" - - _unique_id_suffix = "start_up_current_level" - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFF - _attribute_name = "start_up_current_level" - _attr_translation_key: str = "start_up_current_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COLOR) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class StartUpColorTemperatureConfigurationEntity(ZHANumberConfigurationEntity): - """Representation of a ZHA startup color temperature configuration entity.""" - - _unique_id_suffix = "start_up_color_temperature" - _attr_native_min_value: float = 153 - _attr_native_max_value: float = 500 - _attribute_name = "start_up_color_temperature" - _attr_translation_key: str = "start_up_color_temperature" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this ZHA startup color temperature entity.""" - 
super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - if self._cluster_handler: - self._attr_native_min_value: float = self._cluster_handler.min_mireds - self._attr_native_max_value: float = self._cluster_handler.max_mireds - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TimerDurationMinutes(ZHANumberConfigurationEntity): - """Representation of a ZHA timer duration configuration entity.""" - - _unique_id_suffix = "timer_duration" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0x257 - _attr_native_unit_of_measurement: str | None = UNITS[72] - _attribute_name = "timer_duration" - _attr_translation_key: str = "timer_duration" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class FilterLifeTime(ZHANumberConfigurationEntity): - """Representation of a ZHA filter lifetime configuration entity.""" - - _unique_id_suffix = "filter_life_time" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0x00 - _attr_native_max_value: float = 0xFFFFFFFF - _attr_native_unit_of_measurement: str | None = UNITS[72] - _attribute_name = "filter_life_time" - _attr_translation_key: str = "filter_life_time" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, - manufacturers={"TexasInstruments"}, - models={"ti.router"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TiRouterTransmitPower(ZHANumberConfigurationEntity): - """Representation of a ZHA TI transmit power configuration entity.""" - - _unique_id_suffix = "transmit_power" - _attr_native_min_value: float = -20 - _attr_native_max_value: float = 20 - _attribute_name = "transmit_power" - _attr_translation_key: str = "transmit_power" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingUpSpeed(ZHANumberConfigurationEntity): - """Inovelli remote dimming up speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_up_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 126 - _attribute_name = "dimming_speed_up_remote" - _attr_translation_key: str = "dimming_speed_up_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliButtonDelay(ZHANumberConfigurationEntity): - """Inovelli button delay configuration entity.""" - - _unique_id_suffix = "button_delay" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 9 - _attribute_name = "button_delay" - _attr_translation_key: str = "button_delay" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalDimmingUpSpeed(ZHANumberConfigurationEntity): - """Inovelli local dimming up speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_up_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_up_local" - _attr_translation_key: str = 
"dimming_speed_up_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalRampRateOffToOn(ZHANumberConfigurationEntity): - """Inovelli off to on local ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_off_to_on_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_off_to_on_local" - _attr_translation_key: str = "ramp_rate_off_to_on_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingSpeedOffToOn(ZHANumberConfigurationEntity): - """Inovelli off to on remote ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_off_to_on_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_off_to_on_remote" - _attr_translation_key: str = "ramp_rate_off_to_on_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingDownSpeed(ZHANumberConfigurationEntity): - """Inovelli remote dimming down speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_down_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_down_remote" - _attr_translation_key: str = "dimming_speed_down_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalDimmingDownSpeed(ZHANumberConfigurationEntity): - """Inovelli local dimming down speed configuration entity.""" - - _unique_id_suffix = "dimming_speed_down_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "dimming_speed_down_local" - _attr_translation_key: str = "dimming_speed_down_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLocalRampRateOnToOff(ZHANumberConfigurationEntity): - """Inovelli local on to off ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_on_to_off_local" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_on_to_off_local" - _attr_translation_key: str = "ramp_rate_on_to_off_local" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliRemoteDimmingSpeedOnToOff(ZHANumberConfigurationEntity): - """Inovelli remote on to off ramp rate configuration entity.""" - - _unique_id_suffix = "ramp_rate_on_to_off_remote" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 127 - _attribute_name = "ramp_rate_on_to_off_remote" - _attr_translation_key: str = "ramp_rate_on_to_off_remote" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
InovelliMinimumLoadDimmingLevel(ZHANumberConfigurationEntity): - """Inovelli minimum load dimming level configuration entity.""" - - _unique_id_suffix = "minimum_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 254 - _attribute_name = "minimum_level" - _attr_translation_key: str = "minimum_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliMaximumLoadDimmingLevel(ZHANumberConfigurationEntity): - """Inovelli maximum load dimming level configuration entity.""" - - _unique_id_suffix = "maximum_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 255 - _attribute_name = "maximum_level" - _attr_translation_key: str = "maximum_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliAutoShutoffTimer(ZHANumberConfigurationEntity): - """Inovelli automatic switch shutoff timer configuration entity.""" - - _unique_id_suffix = "auto_off_timer" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 32767 - _attribute_name = "auto_off_timer" - _attr_translation_key: str = "auto_off_timer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliQuickStartTime(ZHANumberConfigurationEntity): - """Inovelli fan quick start time configuration entity.""" - - _unique_id_suffix = "quick_start_time" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 10 - _attribute_name = "quick_start_time" - _attr_translation_key: str = "quick_start_time" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliLoadLevelIndicatorTimeout(ZHANumberConfigurationEntity): - """Inovelli load level indicator timeout configuration entity.""" - - _unique_id_suffix = "load_level_indicator_timeout" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 11 - _attribute_name = "load_level_indicator_timeout" - _attr_translation_key: str = "load_level_indicator_timeout" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOnColor(ZHANumberConfigurationEntity): - """Inovelli default all led color when on configuration entity.""" - - _unique_id_suffix = "led_color_when_on" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 255 - _attribute_name = "led_color_when_on" - _attr_translation_key: str = "led_color_when_on" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOffColor(ZHANumberConfigurationEntity): - """Inovelli default all led color when off configuration entity.""" - - _unique_id_suffix = "led_color_when_off" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 255 - _attribute_name = 
"led_color_when_off" - _attr_translation_key: str = "led_color_when_off" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOnIntensity(ZHANumberConfigurationEntity): - """Inovelli default all led intensity when on configuration entity.""" - - _unique_id_suffix = "led_intensity_when_on" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 100 - _attribute_name = "led_intensity_when_on" - _attr_translation_key: str = "led_intensity_when_on" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDefaultAllLEDOffIntensity(ZHANumberConfigurationEntity): - """Inovelli default all led intensity when off configuration entity.""" - - _unique_id_suffix = "led_intensity_when_off" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 100 - _attribute_name = "led_intensity_when_off" - _attr_translation_key: str = "led_intensity_when_off" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDoubleTapUpLevel(ZHANumberConfigurationEntity): - """Inovelli double tap up level configuration entity.""" - - _unique_id_suffix = "double_tap_up_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 2 - _attr_native_max_value: float = 254 - _attribute_name = "double_tap_up_level" - _attr_translation_key: str = "double_tap_up_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_INOVELLI) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class InovelliDoubleTapDownLevel(ZHANumberConfigurationEntity): - """Inovelli double tap down level configuration entity.""" - - _unique_id_suffix = "double_tap_down_level" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 0 - _attr_native_max_value: float = 254 - _attribute_name = "double_tap_down_level" - _attr_translation_key: str = "double_tap_down_level" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederServingSize(ZHANumberConfigurationEntity): - """Aqara pet feeder serving size configuration entity.""" - - _unique_id_suffix = "serving_size" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 10 - _attribute_name = "serving_size" - _attr_translation_key: str = "serving_size" - - _attr_mode: NumberMode = NumberMode.BOX - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederPortionWeight(ZHANumberConfigurationEntity): - """Aqara pet feeder portion weight configuration entity.""" - - _unique_id_suffix = "portion_weight" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 1 - _attr_native_max_value: float = 100 - _attribute_name = "portion_weight" - _attr_translation_key: str = "portion_weight" - - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfMass.GRAMS - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", 
models={"lumi.airrtc.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraThermostatAwayTemp(ZHANumberConfigurationEntity): - """Aqara away preset temperature configuration entity.""" - - _unique_id_suffix = "away_preset_temperature" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: float = 5 - _attr_native_max_value: float = 30 - _attr_multiplier: float = 0.01 - _attribute_name = "away_preset_temperature" - _attr_translation_key: str = "away_preset_temperature" - - _attr_mode: NumberMode = NumberMode.SLIDER - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ThermostatLocalTempCalibration(ZHANumberConfigurationEntity): - """Local temperature calibration.""" - - _unique_id_suffix = "local_temperature_calibration" - _attr_native_min_value: float = -2.5 - _attr_native_max_value: float = 2.5 - _attr_native_step: float = 0.1 - _attr_multiplier: float = 0.1 - _attribute_name = "local_temperature_calibration" - _attr_translation_key: str = "local_temperature_calibration" - - _attr_mode: NumberMode = NumberMode.SLIDER - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - models={"TRVZB"}, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SonoffThermostatLocalTempCalibration(ThermostatLocalTempCalibration): - """Local temperature calibration for the Sonoff TRVZB.""" - - _attr_native_min_value: float = -7 - _attr_native_max_value: float = 7 - _attr_native_step: float = 0.2 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SonoffPresenceSenorTimeout(ZHANumberConfigurationEntity): - """Configuration of Sonoff sensor presence detection timeout.""" - - _unique_id_suffix = "presence_detection_timeout" - _attr_entity_category = EntityCategory.CONFIG - _attr_native_min_value: int = 15 - _attr_native_max_value: int = 60 - _attribute_name = "ultrasonic_o_to_u_delay" - _attr_translation_key: str = "presence_detection_timeout" - - _attr_mode: NumberMode = NumberMode.BOX - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZCLTemperatureEntity(ZHANumberConfigurationEntity): - """Common entity class for ZCL temperature input.""" - - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_step: float = 0.01 - _attr_multiplier: float = 0.01 - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ZCLHeatSetpointLimitEntity(ZCLTemperatureEntity): - """Min or max heat setpoint setting on thermostats.""" - - _attr_icon: str = "mdi:thermostat" - _attr_native_step: float = 0.5 - - _min_source = Thermostat.AttributeDefs.abs_min_heat_setpoint_limit.name - _max_source = Thermostat.AttributeDefs.abs_max_heat_setpoint_limit.name + return self.entity_data.entity.native_value @property def native_min_value(self) -> float: """Return the minimum value.""" - # The spec says 0x954D, which is a signed integer, therefore the value is in decimals - min_present_value = self._cluster_handler.cluster.get(self._min_source, -27315) - return min_present_value * 
self._attr_multiplier + return self.entity_data.entity.native_min_value @property def native_max_value(self) -> float: """Return the maximum value.""" - max_present_value = self._cluster_handler.cluster.get(self._max_source, 0x7FFF) - return max_present_value * self._attr_multiplier + return self.entity_data.entity.native_max_value + @property + def native_step(self) -> float | None: + """Return the value step.""" + return self.entity_data.entity.native_step -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class MaxHeatSetpointLimit(ZCLHeatSetpointLimitEntity): - """Max heat setpoint setting on thermostats. + @property + def native_unit_of_measurement(self) -> str | None: + """Return the unit the value is expressed in.""" + return self.entity_data.entity.native_unit_of_measurement - Optional thermostat attribute. - """ - - _unique_id_suffix = "max_heat_setpoint_limit" - _attribute_name: str = "max_heat_setpoint_limit" - _attr_translation_key: str = "max_heat_setpoint_limit" - _attr_entity_category = EntityCategory.CONFIG - - _min_source = Thermostat.AttributeDefs.min_heat_setpoint_limit.name - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class MinHeatSetpointLimit(ZCLHeatSetpointLimitEntity): - """Min heat setpoint setting on thermostats. - - Optional thermostat attribute. - """ - - _unique_id_suffix = "min_heat_setpoint_limit" - _attribute_name: str = "min_heat_setpoint_limit" - _attr_translation_key: str = "min_heat_setpoint_limit" - _attr_entity_category = EntityCategory.CONFIG - - _max_source = Thermostat.AttributeDefs.max_heat_setpoint_limit.name - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossExerciseTriggerTime(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set the time to exercise the valve.""" - - _unique_id_suffix = "exercise_trigger_time" - _attribute_name: str = "exercise_trigger_time" - _attr_translation_key: str = "exercise_trigger_time" - _attr_native_min_value: int = 0 - _attr_native_max_value: int = 1439 - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfTime.MINUTES - _attr_icon: str = "mdi:clock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossExternalMeasuredRoomSensor(ZCLTemperatureEntity): - """Danfoss proprietary attribute to communicate the value of the external temperature sensor.""" - - _unique_id_suffix = "external_measured_room_sensor" - _attribute_name: str = "external_measured_room_sensor" - _attr_translation_key: str = "external_temperature_sensor" - _attr_native_min_value: float = -80 - _attr_native_max_value: float = 35 - _attr_icon: str = "mdi:thermometer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossLoadRoomMean(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set a value for the load.""" - - _unique_id_suffix = "load_room_mean" - _attribute_name: str = "load_room_mean" - _attr_translation_key: str = "load_room_mean" - 
_attr_native_min_value: int = -8000 - _attr_native_max_value: int = 2000 - _attr_mode: NumberMode = NumberMode.BOX - _attr_icon: str = "mdi:scale-balance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossRegulationSetpointOffset(ZHANumberConfigurationEntity): - """Danfoss proprietary attribute to set the regulation setpoint offset.""" - - _unique_id_suffix = "regulation_setpoint_offset" - _attribute_name: str = "regulation_setpoint_offset" - _attr_translation_key: str = "regulation_setpoint_offset" - _attr_mode: NumberMode = NumberMode.BOX - _attr_native_unit_of_measurement: str = UnitOfTemperature.CELSIUS - _attr_icon: str = "mdi:thermostat" - _attr_native_min_value: float = -2.5 - _attr_native_max_value: float = 2.5 - _attr_native_step: float = 0.1 - _attr_multiplier = 1 / 10 + @convert_zha_error_to_ha_error + async def async_set_native_value(self, value: float) -> None: + """Update the current value from HA.""" + await self.entity_data.entity.async_set_native_value(value=value) + self.async_write_ha_state() diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index 44b7304c58e..82c30b7678a 100644 --- a/homeassistant/components/zha/radio_manager.py +++ b/homeassistant/components/zha/radio_manager.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections.abc import AsyncIterator import contextlib from contextlib import suppress import copy @@ -13,6 +14,7 @@ from typing import Any, Self from bellows.config import CONF_USE_THREAD import voluptuous as vol +from zha.application.const import RadioType from zigpy.application import ControllerApplication import zigpy.backups from zigpy.config import ( @@ -29,14 +31,13 @@ from homeassistant.components import usb from homeassistant.core import HomeAssistant from . import repairs -from .core.const import ( +from .const import ( CONF_RADIO_TYPE, CONF_ZIGPY, DEFAULT_DATABASE_NAME, EZSP_OVERWRITE_EUI64, - RadioType, ) -from .core.helpers import get_zha_data +from .helpers import get_zha_data # Only the common radio types will be autoprobed, ordered by new device popularity. 
# XBee takes too long to probe since it scans through all possible bauds and likely has @@ -157,7 +158,7 @@ class ZhaRadioManager: return mgr @contextlib.asynccontextmanager - async def connect_zigpy_app(self) -> ControllerApplication: + async def connect_zigpy_app(self) -> AsyncIterator[ControllerApplication]: """Connect to the radio with the current config and then clean up.""" assert self.radio_type is not None @@ -177,7 +178,6 @@ class ZhaRadioManager: app_config[CONF_DEVICE] = self.device_settings app_config[CONF_NWK_BACKUP_ENABLED] = False app_config[CONF_USE_THREAD] = False - app_config = self.radio_type.controller.SCHEMA(app_config) app = await self.radio_type.controller.new( app_config, auto_form=False, start_radio=False diff --git a/homeassistant/components/zha/repairs/__init__.py b/homeassistant/components/zha/repairs/__init__.py index 3d8f2553baa..3fcbdb66bbc 100644 --- a/homeassistant/components/zha/repairs/__init__.py +++ b/homeassistant/components/zha/repairs/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN from .network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, NetworkSettingsInconsistentFlow, diff --git a/homeassistant/components/zha/repairs/network_settings_inconsistent.py b/homeassistant/components/zha/repairs/network_settings_inconsistent.py index 2598ff8f98a..ef38ebc3d47 100644 --- a/homeassistant/components/zha/repairs/network_settings_inconsistent.py +++ b/homeassistant/components/zha/repairs/network_settings_inconsistent.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN from ..radio_manager import ZhaRadioManager _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py index 3cd22c99ec7..4d6d1ae52d8 100644 --- a/homeassistant/components/zha/repairs/wrong_silabs_firmware.py +++ b/homeassistant/components/zha/repairs/wrong_silabs_firmware.py @@ -19,7 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir -from ..core.const import DOMAIN +from ..const import DOMAIN _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/zha/select.py b/homeassistant/components/zha/select.py index 026a85fbfdc..fdb47b550fe 100644 --- a/homeassistant/components/zha/select.py +++ b/homeassistant/components/zha/select.py @@ -2,56 +2,26 @@ from __future__ import annotations -from enum import Enum import functools import logging -from typing import TYPE_CHECKING, Any, Self - -from zhaquirks.danfoss import thermostat as danfoss_thermostat -from zhaquirks.quirk_ids import ( - DANFOSS_ALLY_THERMOSTAT, - TUYA_PLUG_MANUFACTURER, - TUYA_PLUG_ONOFF, -) -from zhaquirks.xiaomi.aqara.magnet_ac01 import OppleCluster as MagnetAC01OppleCluster -from zhaquirks.xiaomi.aqara.switch_acn047 import OppleCluster as T2RelayOppleCluster -from zigpy import types -from zigpy.quirks.v2 import ZCLEnumMetadata -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.clusters.security import IasWd +from typing import Any from 
homeassistant.components.select import SelectEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant, State, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_HUE_OCCUPANCY, - CLUSTER_HANDLER_IAS_WD, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_OCCUPANCY, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, - Strobe, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - - -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SELECT -) _LOGGER = logging.getLogger(__name__) @@ -68,731 +38,38 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, + zha_async_add_entities, async_add_entities, + ZHAEnumSelectEntity, entities_to_create, ), ) config_entry.async_on_unload(unsub) -class ZHAEnumSelectEntity(ZhaEntity, SelectEntity): +class ZHAEnumSelectEntity(ZHAEntity, SelectEntity): """Representation of a ZHA select entity.""" - _attr_entity_category = EntityCategory.CONFIG - _attribute_name: str - _enum: type[Enum] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this select entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - self._attribute_name = self._enum.__name__ - self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA select entity.""" + super().__init__(entity_data, **kwargs) + self._attr_options = self.entity_data.entity.info_object.options @property def current_option(self) -> str | None: """Return the selected entity option to represent the entity state.""" - option = self._cluster_handler.data_cache.get(self._attribute_name) - if option is None: - return None - return option.name.replace("_", " ") + return self.entity_data.entity.current_option + @convert_zha_error_to_ha_error async def async_select_option(self, option: str) -> None: """Change the selected option.""" - self._cluster_handler.data_cache[self._attribute_name] = self._enum[ - option.replace(" ", "_") - ] + await self.entity_data.entity.async_select_option(option=option) self.async_write_ha_state() @callback - def async_restore_last_state(self, last_state) -> None: - """Restore previous state.""" - if last_state.state and last_state.state != STATE_UNKNOWN: - self._cluster_handler.data_cache[self._attribute_name] = self._enum[ - last_state.state.replace(" ", "_") - ] - - -class ZHANonZCLSelectEntity(ZHAEnumSelectEntity): - """Representation of a ZHA select entity with no ZCL interaction.""" - - @property - def 
available(self) -> bool: - """Return entity availability.""" - return True - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultToneSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren tone select entity.""" - - _unique_id_suffix = IasWd.Warning.WarningMode.__name__ - _enum = IasWd.Warning.WarningMode - _attr_translation_key: str = "default_siren_tone" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultSirenLevelSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren level select entity.""" - - _unique_id_suffix = IasWd.Warning.SirenLevel.__name__ - _enum = IasWd.Warning.SirenLevel - _attr_translation_key: str = "default_siren_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultStrobeLevelSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren strobe level select entity.""" - - _unique_id_suffix = IasWd.StrobeLevel.__name__ - _enum = IasWd.StrobeLevel - _attr_translation_key: str = "default_strobe_level" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHADefaultStrobeSelectEntity(ZHANonZCLSelectEntity): - """Representation of a ZHA default siren strobe select entity.""" - - _unique_id_suffix = Strobe.__name__ - _enum = Strobe - _attr_translation_key: str = "default_strobe" - - -class ZCLEnumSelectEntity(ZhaEntity, SelectEntity): - """Representation of a ZHA ZCL enum select entity.""" - - _attribute_name: str - _attr_entity_category = EntityCategory.CONFIG - _enum: type[Enum] - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, + def restore_external_state_attributes(self, state: State) -> None: + """Restore entity state.""" + if state.state and state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + self.entity_data.entity.restore_external_state_attributes( + state=state.state, ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this select entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - self._attr_options = [entry.name.replace("_", " ") for entry in self._enum] - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - self._enum = entity_metadata.enum - - @property - def current_option(self) -> str | None: - """Return the selected entity option to represent the entity state.""" - option = self._cluster_handler.cluster.get(self._attribute_name) - if option is None: - return None - option = self._enum(option) - return option.name.replace("_", " ") - - async def async_select_option(self, option: str) -> None: - """Change the selected option.""" - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._enum[option.replace(" ", "_")]} - ) - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class ZHAStartupOnOffSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA startup onoff select entity.""" - - _unique_id_suffix = OnOff.StartUpOnOff.__name__ - _attribute_name = "start_up_on_off" - _enum = OnOff.StartUpOnOff - _attr_translation_key: str = "start_up_on_off" - - -class TuyaPowerOnState(types.enum8): - """Tuya power on state enum.""" - - Off = 0x00 - On = 0x01 - LastState = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER -) -class TuyaPowerOnStateSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA power on state select entity.""" - - _unique_id_suffix = "power_on_state" - _attribute_name = "power_on_state" - _enum = TuyaPowerOnState - _attr_translation_key: str = "power_on_state" - - -class TuyaBacklightMode(types.enum8): - """Tuya 
switch backlight mode enum.""" - - Off = 0x00 - LightWhenOn = 0x01 - LightWhenOff = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -class TuyaBacklightModeSelectEntity(ZCLEnumSelectEntity): - """Representation of a ZHA backlight mode select entity.""" - - _unique_id_suffix = "backlight_mode" - _attribute_name = "backlight_mode" - _enum = TuyaBacklightMode - _attr_translation_key: str = "backlight_mode" - - -class MoesBacklightMode(types.enum8): - """MOES switch backlight mode enum.""" - - Off = 0x00 - LightWhenOn = 0x01 - LightWhenOff = 0x02 - Freeze = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", quirk_ids=TUYA_PLUG_MANUFACTURER -) -class MoesBacklightModeSelectEntity(ZCLEnumSelectEntity): - """Moes devices have a different backlight mode select options.""" - - _unique_id_suffix = "backlight_mode" - _attribute_name = "backlight_mode" - _enum = MoesBacklightMode - _attr_translation_key: str = "backlight_mode" - - -class AqaraMotionSensitivities(types.enum8): - """Aqara motion sensitivities.""" - - Low = 0x01 - Medium = 0x02 - High = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.motion.ac01", "lumi.motion.ac02", "lumi.motion.agl04"}, -) -class AqaraMotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "motion_sensitivity" - _enum = AqaraMotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class HueV1MotionSensitivities(types.enum8): - """Hue v1 motion sensitivities.""" - - Low = 0x00 - Medium = 0x01 - High = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML001"}, -) -class HueV1MotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "sensitivity" - _enum = HueV1MotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class HueV2MotionSensitivities(types.enum8): - """Hue v2 motion sensitivities.""" - - Lowest = 0x00 - Low = 0x01 - Medium = 0x02 - High = 0x03 - Highest = 0x04 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_HUE_OCCUPANCY, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML002", "SML003", "SML004"}, -) -class HueV2MotionSensitivity(ZCLEnumSelectEntity): - """Representation of a ZHA motion sensitivity configuration entity.""" - - _unique_id_suffix = "motion_sensitivity" - _attribute_name = "sensitivity" - _enum = HueV2MotionSensitivities - _attr_translation_key: str = "motion_sensitivity" - - -class AqaraMonitoringModess(types.enum8): - """Aqara monitoring modes.""" - - Undirected = 0x00 - Left_Right = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac01"} -) -class AqaraMonitoringMode(ZCLEnumSelectEntity): - """Representation of a ZHA monitoring mode configuration entity.""" - - _unique_id_suffix = "monitoring_mode" - _attribute_name = "monitoring_mode" - _enum = AqaraMonitoringModess - _attr_translation_key: str = "monitoring_mode" - - -class AqaraApproachDistances(types.enum8): - """Aqara approach distances.""" - - Far = 0x00 - Medium = 0x01 - Near = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", 
models={"lumi.motion.ac01"} -) -class AqaraApproachDistance(ZCLEnumSelectEntity): - """Representation of a ZHA approach distance configuration entity.""" - - _unique_id_suffix = "approach_distance" - _attribute_name = "approach_distance" - _enum = AqaraApproachDistances - _attr_translation_key: str = "approach_distance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.magnet.ac01"} -) -class AqaraMagnetAC01DetectionDistance(ZCLEnumSelectEntity): - """Representation of a ZHA detection distance configuration entity.""" - - _unique_id_suffix = "detection_distance" - _attribute_name = "detection_distance" - _enum = MagnetAC01OppleCluster.DetectionDistance - _attr_translation_key: str = "detection_distance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelaySwitchMode(ZCLEnumSelectEntity): - """Representation of a ZHA switch mode configuration entity.""" - - _unique_id_suffix = "switch_mode" - _attribute_name = "switch_mode" - _enum = T2RelayOppleCluster.SwitchMode - _attr_translation_key: str = "switch_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelaySwitchType(ZCLEnumSelectEntity): - """Representation of a ZHA switch type configuration entity.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = T2RelayOppleCluster.SwitchType - _attr_translation_key: str = "switch_type" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelayStartupOnOff(ZCLEnumSelectEntity): - """Representation of a ZHA startup on off configuration entity.""" - - _unique_id_suffix = "startup_on_off" - _attribute_name = "startup_on_off" - _enum = T2RelayOppleCluster.StartupOnOff - _attr_translation_key: str = "start_up_on_off" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.switch.acn047"} -) -class AqaraT2RelayDecoupledMode(ZCLEnumSelectEntity): - """Representation of a ZHA switch decoupled mode configuration entity.""" - - _unique_id_suffix = "decoupled_mode" - _attribute_name = "decoupled_mode" - _enum = T2RelayOppleCluster.DecoupledMode - _attr_translation_key: str = "decoupled_mode" - - -class InovelliOutputMode(types.enum1): - """Inovelli output mode.""" - - Dimmer = 0x00 - OnOff = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliOutputModeEntity(ZCLEnumSelectEntity): - """Inovelli output mode control.""" - - _unique_id_suffix = "output_mode" - _attribute_name = "output_mode" - _enum = InovelliOutputMode - _attr_translation_key: str = "output_mode" - - -class InovelliSwitchType(types.enum8): - """Inovelli switch mode.""" - - Single_Pole = 0x00 - Three_Way_Dumb = 0x01 - Three_Way_AUX = 0x02 - Single_Pole_Full_Sine = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM31-SN"} -) -class InovelliSwitchTypeEntity(ZCLEnumSelectEntity): - """Inovelli switch type control.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = InovelliSwitchType - _attr_translation_key: str = "switch_type" - - -class InovelliFanSwitchType(types.enum1): - """Inovelli fan switch mode.""" - - Load_Only = 0x00 - Three_Way_AUX = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliFanSwitchTypeEntity(ZCLEnumSelectEntity): - 
"""Inovelli fan switch type control.""" - - _unique_id_suffix = "switch_type" - _attribute_name = "switch_type" - _enum = InovelliFanSwitchType - _attr_translation_key: str = "switch_type" - - -class InovelliLedScalingMode(types.enum1): - """Inovelli led mode.""" - - VZM31SN = 0x00 - LZW31SN = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliLedScalingModeEntity(ZCLEnumSelectEntity): - """Inovelli led mode control.""" - - _unique_id_suffix = "led_scaling_mode" - _attribute_name = "led_scaling_mode" - _enum = InovelliLedScalingMode - _attr_translation_key: str = "led_scaling_mode" - - -class InovelliFanLedScalingMode(types.enum8): - """Inovelli fan led mode.""" - - VZM31SN = 0x00 - Grade_1 = 0x01 - Grade_2 = 0x02 - Grade_3 = 0x03 - Grade_4 = 0x04 - Grade_5 = 0x05 - Grade_6 = 0x06 - Grade_7 = 0x07 - Grade_8 = 0x08 - Grade_9 = 0x09 - Adaptive = 0x0A - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliFanLedScalingModeEntity(ZCLEnumSelectEntity): - """Inovelli fan switch led mode control.""" - - _unique_id_suffix = "smart_fan_led_display_levels" - _attribute_name = "smart_fan_led_display_levels" - _enum = InovelliFanLedScalingMode - _attr_translation_key: str = "smart_fan_led_display_levels" - - -class InovelliNonNeutralOutput(types.enum1): - """Inovelli non neutral output selection.""" - - Low = 0x00 - High = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliNonNeutralOutputEntity(ZCLEnumSelectEntity): - """Inovelli non neutral output control.""" - - _unique_id_suffix = "increased_non_neutral_output" - _attribute_name = "increased_non_neutral_output" - _enum = InovelliNonNeutralOutput - _attr_translation_key: str = "increased_non_neutral_output" - - -class AqaraFeedingMode(types.enum8): - """Feeding mode.""" - - Manual = 0x00 - Schedule = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederMode(ZCLEnumSelectEntity): - """Representation of an Aqara pet feeder mode configuration entity.""" - - _unique_id_suffix = "feeding_mode" - _attribute_name = "feeding_mode" - _enum = AqaraFeedingMode - _attr_translation_key: str = "feeding_mode" - - -class AqaraThermostatPresetMode(types.enum8): - """Thermostat preset mode.""" - - Manual = 0x00 - Auto = 0x01 - Away = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatPreset(ZCLEnumSelectEntity): - """Representation of an Aqara thermostat preset configuration entity.""" - - _unique_id_suffix = "preset" - _attribute_name = "preset" - _enum = AqaraThermostatPresetMode - _attr_translation_key: str = "preset" - - -class SonoffPresenceDetectionSensitivityEnum(types.enum8): - """Enum for detection sensitivity select entity.""" - - Low = 0x01 - Medium = 0x02 - High = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_OCCUPANCY, models={"SNZB-06P"} -) -class SonoffPresenceDetectionSensitivity(ZCLEnumSelectEntity): - """Entity to set the detection sensitivity of the Sonoff SNZB-06P.""" - - _unique_id_suffix = "detection_sensitivity" - _attribute_name = "ultrasonic_u_to_o_threshold" - _enum = SonoffPresenceDetectionSensitivityEnum - _attr_translation_key: str = "detection_sensitivity" - - -class KeypadLockoutEnum(types.enum8): - """Keypad lockout options.""" - - Unlock = 0x00 - Lock1 = 0x01 - Lock2 = 0x02 - 
Lock3 = 0x03 - Lock4 = 0x04 - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names="thermostat_ui") -class KeypadLockout(ZCLEnumSelectEntity): - """Mandatory attribute for thermostat_ui cluster. - - Often only the first two are implemented, and Lock2 to Lock4 should map to Lock1 in the firmware. - This however covers all bases. - """ - - _unique_id_suffix = "keypad_lockout" - _attribute_name: str = "keypad_lockout" - _enum = KeypadLockoutEnum - _attr_translation_key: str = "keypad_lockout" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossExerciseDayOfTheWeek(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the day of the week for exercising.""" - - _unique_id_suffix = "exercise_day_of_week" - _attribute_name = "exercise_day_of_week" - _attr_translation_key: str = "exercise_day_of_week" - _enum = danfoss_thermostat.DanfossExerciseDayOfTheWeekEnum - _attr_icon: str = "mdi:wrench-clock" - - -class DanfossOrientationEnum(types.enum8): - """Vertical or Horizontal.""" - - Horizontal = 0x00 - Vertical = 0x01 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossOrientation(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the orientation of the valve. - - Needed for biasing the internal temperature sensor. - This is implemented as an enum here, but is a boolean on the device. - """ - - _unique_id_suffix = "orientation" - _attribute_name = "orientation" - _attr_translation_key: str = "valve_orientation" - _enum = DanfossOrientationEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossAdaptationRunControl(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for controlling the current adaptation run.""" - - _unique_id_suffix = "adaptation_run_control" - _attribute_name = "adaptation_run_control" - _attr_translation_key: str = "adaptation_run_command" - _enum = danfoss_thermostat.DanfossAdaptationRunControlEnum - - -class DanfossControlAlgorithmScaleFactorEnum(types.enum8): - """The time scale factor for changing the opening of the valve. - - Not all values are given, therefore there are some extrapolated values with a margin of error of about 5 minutes. - This is implemented as an enum here, but is a number on the device. 
- """ - - quick_5min = 0x01 - - quick_10min = 0x02 # extrapolated - quick_15min = 0x03 # extrapolated - quick_25min = 0x04 # extrapolated - - moderate_30min = 0x05 - - moderate_40min = 0x06 # extrapolated - moderate_50min = 0x07 # extrapolated - moderate_60min = 0x08 # extrapolated - moderate_70min = 0x09 # extrapolated - - slow_80min = 0x0A - - quick_open_disabled = 0x11 # not sure what it does; also requires lower 4 bits to be in [1, 10] I assume - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossControlAlgorithmScaleFactor(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the scale factor of the setpoint filter time constant.""" - - _unique_id_suffix = "control_algorithm_scale_factor" - _attribute_name = "control_algorithm_scale_factor" - _attr_translation_key: str = "setpoint_response_time" - _enum = DanfossControlAlgorithmScaleFactorEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="thermostat_ui", - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossViewingDirection(ZCLEnumSelectEntity): - """Danfoss proprietary attribute for setting the viewing direction of the screen.""" - - _unique_id_suffix = "viewing_direction" - _attribute_name = "viewing_direction" - _attr_translation_key: str = "viewing_direction" - _enum = danfoss_thermostat.DanfossViewingDirectionEnum diff --git a/homeassistant/components/zha/sensor.py b/homeassistant/components/zha/sensor.py index 99d950dc06a..dde000b24b5 100644 --- a/homeassistant/components/zha/sensor.py +++ b/homeassistant/components/zha/sensor.py @@ -2,115 +2,71 @@ from __future__ import annotations -import asyncio -from dataclasses import dataclass -from datetime import timedelta -import enum +from collections.abc import Mapping import functools import logging -import numbers -import random -from typing import TYPE_CHECKING, Any, Self +from typing import Any -from zhaquirks.danfoss import thermostat as danfoss_thermostat -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT -from zigpy import types -from zigpy.quirks.v2 import ZCLEnumMetadata, ZCLSensorMetadata -from zigpy.state import Counter, State -from zigpy.zcl.clusters.closures import WindowCovering -from zigpy.zcl.clusters.general import Basic - -from homeassistant.components.climate import HVACAction from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, - SensorEntityDescription, SensorStateClass, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_BILLION, - CONCENTRATION_PARTS_PER_MILLION, - LIGHT_LUX, - PERCENTAGE, - SIGNAL_STRENGTH_DECIBELS_MILLIWATT, - EntityCategory, - Platform, - UnitOfApparentPower, - UnitOfElectricCurrent, - UnitOfElectricPotential, - UnitOfEnergy, - UnitOfFrequency, - UnitOfMass, - UnitOfPower, - UnitOfPressure, - UnitOfTemperature, - UnitOfTime, - UnitOfVolume, - UnitOfVolumeFlowRate, -) -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import StateType -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_ANALOG_INPUT, - CLUSTER_HANDLER_BASIC, - 
CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_DEVICE_TEMPERATURE, - CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - CLUSTER_HANDLER_HUMIDITY, - CLUSTER_HANDLER_ILLUMINANCE, - CLUSTER_HANDLER_LEAF_WETNESS, - CLUSTER_HANDLER_POWER_CONFIGURATION, - CLUSTER_HANDLER_PRESSURE, - CLUSTER_HANDLER_SMARTENERGY_METERING, - CLUSTER_HANDLER_SOIL_MOISTURE, - CLUSTER_HANDLER_TEMPERATURE, - CLUSTER_HANDLER_THERMOSTAT, - DATA_ZHA, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, + EntityData, + async_add_entities as zha_async_add_entities, + exclude_none_values, + get_zha_data, ) -from .core.helpers import get_zha_data, validate_device_class, validate_unit -from .core.registries import SMARTTHINGS_HUMIDITY_CLUSTER, ZHA_ENTITIES -from .entity import BaseZhaEntity, ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -BATTERY_SIZES = { - 0: "No battery", - 1: "Built in", - 2: "Other", - 3: "AA", - 4: "AAA", - 5: "C", - 6: "D", - 7: "CR2", - 8: "CR123A", - 9: "CR2450", - 10: "CR2032", - 11: "CR1632", - 255: "Unknown", -} _LOGGER = logging.getLogger(__name__) -CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER = ( - f"cluster_handler_0x{SMARTTHINGS_HUMIDITY_CLUSTER:04x}" -) -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SENSOR) -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SENSOR) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SENSOR -) +# For backwards compatibility and transparency, all expected extra state attributes are +# explicitly listed below. These should have been sensors themselves but for whatever +# reason were not created as such. They will be migrated to independent sensor entities +# in a future release. 
+_EXTRA_STATE_ATTRIBUTES: set[str] = { + # Battery + "battery_size", + "battery_quantity", + "battery_voltage", + # Power + "measurement_type", + "apparent_power_max", + "rms_current_max", + "rms_voltage_max", + "ac_frequency_max", + "power_factor_max", + "active_power_max", + # Smart Energy metering + "device_type", + "status", + "zcl_unit_of_measurement", + # Danfoss bitmaps + "In_progress", + "Valve_characteristic_found", + "Valve_characteristic_lost", + "Top_pcb_sensor_error", + "Side_pcb_sensor_error", + "Non_volatile_memory_error", + "Unknown_hw_error", + "Motor_error", + "Invalid_internal_communication", + "Invalid_clock_information", + "Radio_communication_error", + "Encoder_jammed", + "Low_battery", + "Critical_low_battery", +} async def async_setup_entry( @@ -126,1504 +82,76 @@ hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, Sensor, entities_to_create ), ) config_entry.async_on_unload(unsub) # pylint: disable-next=hass-invalid-inheritance # needs fixing -class Sensor(ZhaEntity, SensorEntity): - """Base ZHA sensor.""" +class Sensor(ZHAEntity, SensorEntity): + """ZHA sensor.""" - _attribute_name: int | str | None = None - _decimals: int = 1 - _divisor: int = 1 - _multiplier: int | float = 1 + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA sensor entity.""" + super().__init__(entity_data, **kwargs) + entity = self.entity_data.entity - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. + if entity.device_class is not None: + self._attr_device_class = SensorDeviceClass(entity.device_class) - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name + if entity.state_class is not None: + self._attr_state_class = SensorStateClass(entity.state_class) + + if hasattr(entity.info_object, "unit") and entity.info_object.unit is not None: + self._attr_native_unit_of_measurement = entity.info_object.unit + + if ( + hasattr(entity, "entity_description") + and entity.entity_description is not None ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None + entity_description = entity.entity_description - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: ZCLSensorMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.divisor is not None: - self._divisor = entity_metadata.divisor - if entity_metadata.multiplier is not None: - self._multiplier =
entity_metadata.multiplier - if entity_metadata.device_class is not None: - self._attr_device_class = validate_device_class( - SensorDeviceClass, - entity_metadata.device_class, - Platform.SENSOR.value, - _LOGGER, - ) - if entity_metadata.device_class is None and entity_metadata.unit is not None: - self._attr_native_unit_of_measurement = validate_unit( - entity_metadata.unit - ).value - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - assert self._attribute_name is not None - raw_state = self._cluster_handler.cluster.get(self._attribute_name) - if raw_state is None: - return None - return self.formatter(raw_state) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any) -> None: - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - def formatter(self, value: int | enum.IntEnum) -> int | float | str | None: - """Numeric pass-through formatter.""" - if self._decimals > 0: - return round( - float(value * self._multiplier) / self._divisor, self._decimals - ) - return round(float(value * self._multiplier) / self._divisor) - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PollableSensor(Sensor): - """Base ZHA sensor that polls for state.""" - - _use_custom_polling: bool = True - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._cancel_refresh_handle: CALLBACK_TYPE | None = None - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - if self._use_custom_polling: - refresh_interval = random.randint(30, 60) - self._cancel_refresh_handle = async_track_time_interval( - self.hass, self._refresh, timedelta(seconds=refresh_interval) - ) - self.debug("started polling with refresh interval of %s", refresh_interval) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - if self._cancel_refresh_handle is not None: - self._cancel_refresh_handle() - self._cancel_refresh_handle = None - self.debug("stopped polling during device removal") - await super().async_will_remove_from_hass() - - async def _refresh(self, time): - """Call async_update at a constrained random interval.""" - if self._zha_device.available and self.hass.data[DATA_ZHA].allow_polling: - self.debug("polling for updated state") - await self.async_update() - self.async_write_ha_state() - else: - self.debug( - "skipping polling for updated state, available: %s, allow polled requests: %s", - self._zha_device.available, - self.hass.data[DATA_ZHA].allow_polling, - ) - - -class DeviceCounterSensor(BaseZhaEntity, SensorEntity): - """Device counter sensor.""" - - _attr_should_poll = True - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_entity_registry_enabled_default = False - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - counter_groups: str, - counter_group: str, - counter: str, - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - return cls( - unique_id, zha_device, counter_groups, counter_group, counter, **kwargs - ) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - counter_groups: str, - counter_group: str, - counter: str, - **kwargs: Any, - ) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, **kwargs) - state: State = self._zha_device.gateway.application_controller.state - self._zigpy_counter: Counter = ( - getattr(state, counter_groups).get(counter_group, {}).get(counter, None) - ) - self._attr_name: str = self._zigpy_counter.name - self.remove_future: asyncio.Future - - @property - def available(self) -> bool: - """Return entity availability.""" - return self._zha_device.available - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - self.remove_future = self.hass.loop.create_future() - self._zha_device.gateway.register_entity_reference( - self._zha_device.ieee, - self.entity_id, - self._zha_device, - {}, - self.device_info, - self.remove_future, - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect entity object when removed.""" - await super().async_will_remove_from_hass() - self.zha_device.gateway.remove_entity_reference(self) - self.remove_future.set_result(True) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return self._zigpy_counter.value - - async def async_update(self) -> None: - """Retrieve latest state.""" - self.async_write_ha_state() - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class EnumSensor(Sensor): - """Sensor with value from enum.""" - - _attr_device_class: SensorDeviceClass = SensorDeviceClass.ENUM - _enum: type[enum.Enum] - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this sensor.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._attr_options = [e.name for e in self._enum] - - def _init_from_quirks_metadata(self, entity_metadata: ZCLEnumMetadata) -> None: - """Init this entity from the quirks metadata.""" - ZhaEntity._init_from_quirks_metadata(self, entity_metadata) # noqa: SLF001 - self._attribute_name = entity_metadata.attribute_name - self._enum = entity_metadata.enum - - def formatter(self, value: int) -> str | None: - """Use name of enum.""" - assert self._enum is not None - return self._enum(value).name - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ANALOG_INPUT, - manufacturers="Digi", - stop_on_match_group=CLUSTER_HANDLER_ANALOG_INPUT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AnalogInput(Sensor): - """Sensor that displays analog input values.""" - - _attribute_name = "present_value" - _attr_translation_key: str = "analog_input" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_POWER_CONFIGURATION) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Battery(Sensor): - """Battery sensor of power configuration cluster.""" - - _attribute_name = "battery_percentage_remaining" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.BATTERY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_native_unit_of_measurement = PERCENTAGE - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: 
list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Unlike any other entity, PowerConfiguration cluster may not support - battery_percent_remaining attribute, but zha-device-handlers takes care of it - so create the entity regardless - """ - if zha_device.is_mains_powered: - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @staticmethod - def formatter(value: int) -> int | None: - """Return the state of the entity.""" - # per zcl specs battery percent is reported at 200% ¯\_(ツ)_/¯ - if not isinstance(value, numbers.Number) or value == -1 or value == 255: - return None - return round(value / 2) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state attrs for battery sensors.""" - state_attrs = {} - battery_size = self._cluster_handler.cluster.get("battery_size") - if battery_size is not None: - state_attrs["battery_size"] = BATTERY_SIZES.get(battery_size, "Unknown") - battery_quantity = self._cluster_handler.cluster.get("battery_quantity") - if battery_quantity is not None: - state_attrs["battery_quantity"] = battery_quantity - battery_voltage = self._cluster_handler.cluster.get("battery_voltage") - if battery_voltage is not None: - state_attrs["battery_voltage"] = round(battery_voltage / 10, 2) - return state_attrs - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - models={"VZM31-SN", "SP 234", "outletv4"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurement(PollableSensor): - """Active power measurement.""" - - _use_custom_polling: bool = False - _attribute_name = "active_power" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_native_unit_of_measurement: str = UnitOfPower.WATT - _div_mul_prefix: str | None = "ac_power" - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state attrs for sensor.""" - attrs = {} - if self._cluster_handler.measurement_type is not None: - attrs["measurement_type"] = self._cluster_handler.measurement_type - - max_attr_name = f"{self._attribute_name}_max" - - try: - max_v = self._cluster_handler.cluster.get(max_attr_name) - except KeyError: - pass - else: - if max_v is not None: - attrs[max_attr_name] = str(self.formatter(max_v)) - - return attrs - - def formatter(self, value: int) -> int | float: - """Return 'normalized' value.""" - if self._div_mul_prefix: - multiplier = getattr( - self._cluster_handler, f"{self._div_mul_prefix}_multiplier" - ) - divisor = getattr(self._cluster_handler, f"{self._div_mul_prefix}_divisor") - else: - multiplier = self._multiplier - divisor = self._divisor - value = float(value * multiplier) / divisor - if value < 100 and divisor > 1: - return round(value, self._decimals) - return round(value) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, - stop_on_match_group=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PolledElectricalMeasurement(ElectricalMeasurement): - """Polled active power measurement.""" - - _use_custom_polling: bool = True - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementApparentPower(PolledElectricalMeasurement): - """Apparent power 
measurement.""" - - _attribute_name = "apparent_power" - _unique_id_suffix = "apparent_power" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.APPARENT_POWER - _attr_native_unit_of_measurement = UnitOfApparentPower.VOLT_AMPERE - _div_mul_prefix = "ac_power" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementRMSCurrent(PolledElectricalMeasurement): - """RMS current measurement.""" - - _attribute_name = "rms_current" - _unique_id_suffix = "rms_current" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CURRENT - _attr_native_unit_of_measurement = UnitOfElectricCurrent.AMPERE - _div_mul_prefix = "ac_current" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementRMSVoltage(PolledElectricalMeasurement): - """RMS Voltage measurement.""" - - _attribute_name = "rms_voltage" - _unique_id_suffix = "rms_voltage" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLTAGE - _attr_native_unit_of_measurement = UnitOfElectricPotential.VOLT - _div_mul_prefix = "ac_voltage" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementFrequency(PolledElectricalMeasurement): - """Frequency measurement.""" - - _attribute_name = "ac_frequency" - _unique_id_suffix = "ac_frequency" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.FREQUENCY - _attr_translation_key: str = "ac_frequency" - _attr_native_unit_of_measurement = UnitOfFrequency.HERTZ - _div_mul_prefix = "ac_frequency" - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ELECTRICAL_MEASUREMENT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ElectricalMeasurementPowerFactor(PolledElectricalMeasurement): - """Power Factor measurement.""" - - _attribute_name = "power_factor" - _unique_id_suffix = "power_factor" - _use_custom_polling = False # Poll indirectly by ElectricalMeasurementSensor - _attr_device_class: SensorDeviceClass = SensorDeviceClass.POWER_FACTOR - _attr_native_unit_of_measurement = PERCENTAGE - _div_mul_prefix = None - - -@MULTI_MATCH( - generic_ids=CLUSTER_HANDLER_ST_HUMIDITY_CLUSTER, - stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, -) -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_HUMIDITY, - stop_on_match_group=CLUSTER_HANDLER_HUMIDITY, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Humidity(Sensor): - """Humidity sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_SOIL_MOISTURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SoilMoisture(Sensor): - """Soil Moisture sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: 
SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "soil_moisture" - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_LEAF_WETNESS) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class LeafWetness(Sensor): - """Leaf Wetness sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.HUMIDITY - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "leaf_wetness" - _divisor = 100 - _attr_native_unit_of_measurement = PERCENTAGE - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_ILLUMINANCE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Illuminance(Sensor): - """Illuminance Sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.ILLUMINANCE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_native_unit_of_measurement = LIGHT_LUX - - def formatter(self, value: int) -> int | None: - """Convert illumination data.""" - if value == 0: - return 0 - if value == 0xFFFF: - return None - return round(pow(10, ((value - 1) / 10000))) - - -@dataclass(frozen=True, kw_only=True) -class SmartEnergyMeteringEntityDescription(SensorEntityDescription): - """Dataclass that describes a Zigbee smart energy metering entity.""" - - key: str = "instantaneous_demand" - state_class: SensorStateClass | None = SensorStateClass.MEASUREMENT - scale: int = 1 - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergyMetering(PollableSensor): - """Metering sensor.""" - - entity_description: SmartEnergyMeteringEntityDescription - _use_custom_polling: bool = False - _attribute_name = "instantaneous_demand" - _attr_translation_key: str = "instantaneous_demand" - - _ENTITY_DESCRIPTION_MAP = { - 0x00: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - ), - 0x01: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - device_class=None, # volume flow rate is not supported yet - ), - 0x02: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE, - device_class=None, # volume flow rate is not supported yet - ), - 0x03: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - device_class=None, # volume flow rate is not supported yet - scale=100, - ), - 0x04: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # US gallons per hour - device_class=None, # volume flow rate is not supported yet - ), - 0x05: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}/{UnitOfTime.HOURS}", # IMP gallons per hour - device_class=None, # needs to be None as imperial gallons are not supported - ), - 0x06: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPower.BTU_PER_HOUR, - device_class=None, - state_class=None, - ), - 0x07: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"l/{UnitOfTime.HOURS}", - device_class=None, # volume flow rate is not supported yet - ), - 0x08: 
SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - ), # gauge - 0x09: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - ), # absolute - 0x0A: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfVolume.CUBIC_FEET}/{UnitOfTime.HOURS}", # cubic feet per hour - device_class=None, # volume flow rate is not supported yet - scale=1000, - ), - 0x0B: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement="unitless", device_class=None, state_class=None - ), - 0x0C: SmartEnergyMeteringEntityDescription( - native_unit_of_measurement=f"{UnitOfEnergy.MEGA_JOULE}/{UnitOfTime.SECONDS}", - device_class=None, # needs to be None as MJ/s is not supported - ), - } - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - entity_description = self._ENTITY_DESCRIPTION_MAP.get( - self._cluster_handler.unit_of_measurement - ) - if entity_description is not None: - self.entity_description = entity_description - - def formatter(self, value: int) -> int | float: - """Pass through cluster handler formatter.""" - return self._cluster_handler.demand_formatter(value) - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return device state attrs for battery sensors.""" - attrs = {} - if self._cluster_handler.device_type is not None: - attrs["device_type"] = self._cluster_handler.device_type - if (status := self._cluster_handler.status) is not None: - if isinstance(status, enum.IntFlag): - attrs["status"] = str( - status.name if status.name is not None else status.value + if entity_description.state_class is not None: + self._attr_state_class = SensorStateClass( + entity_description.state_class.value + ) + + if entity_description.scale is not None: + self._attr_scale = entity_description.scale + + if entity_description.native_unit_of_measurement is not None: + self._attr_native_unit_of_measurement = ( + entity_description.native_unit_of_measurement + ) + + if entity_description.device_class is not None: + self._attr_device_class = SensorDeviceClass( + entity_description.device_class.value ) - else: - attrs["status"] = str(status)[len(status.__class__.__name__) + 1 :] - return attrs @property def native_value(self) -> StateType: """Return the state of the entity.""" - state = super().native_value - if hasattr(self, "entity_description") and state is not None: - return float(state) * self.entity_description.scale - - return state - - -@dataclass(frozen=True, kw_only=True) -class SmartEnergySummationEntityDescription(SmartEnergyMeteringEntityDescription): - """Dataclass that describes a Zigbee smart energy summation entity.""" - - key: str = "summation_delivered" - state_class: SensorStateClass | None = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergySummation(SmartEnergyMetering): - """Smart Energy Metering summation sensor.""" - - entity_description: SmartEnergySummationEntityDescription - _attribute_name = "current_summ_delivered" - _unique_id_suffix = "summation_delivered" - _attr_translation_key: str = "summation_delivered" - - 
_ENTITY_DESCRIPTION_MAP = { - 0x00: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - ), - 0x01: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, - device_class=SensorDeviceClass.VOLUME, - ), - 0x02: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - ), - 0x03: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - scale=100, - ), - 0x04: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.GALLONS, # US gallons - device_class=SensorDeviceClass.VOLUME, - ), - 0x05: SmartEnergySummationEntityDescription( - native_unit_of_measurement=f"IMP {UnitOfVolume.GALLONS}", - device_class=None, # needs to be None as imperial gallons are not supported - ), - 0x06: SmartEnergySummationEntityDescription( - native_unit_of_measurement="BTU", device_class=None, state_class=None - ), - 0x07: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.LITERS, - device_class=SensorDeviceClass.VOLUME, - ), - 0x08: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - ), # gauge - 0x09: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfPressure.KPA, - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - ), # absolute - 0x0A: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfVolume.CUBIC_FEET, - device_class=SensorDeviceClass.VOLUME, - scale=1000, - ), - 0x0B: SmartEnergySummationEntityDescription( - native_unit_of_measurement="unitless", device_class=None, state_class=None - ), - 0x0C: SmartEnergySummationEntityDescription( - native_unit_of_measurement=UnitOfEnergy.MEGA_JOULE, - device_class=SensorDeviceClass.ENERGY, - ), - } - - def formatter(self, value: int) -> int | float: - """Numeric pass-through formatter.""" - if self._cluster_handler.unit_of_measurement != 0: - return self._cluster_handler.summa_formatter(value) - - cooked = ( - float(self._cluster_handler.multiplier * value) - / self._cluster_handler.divisor - ) - return round(cooked, 3) - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"TS011F", "ZLinky_TIC", "TICMeter"}, - stop_on_match_group=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PolledSmartEnergySummation(SmartEnergySummation): - """Polled Smart Energy Metering summation sensor.""" - - _use_custom_polling: bool = True - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier1SmartEnergySummation(PolledSmartEnergySummation): - """Tier 1 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier1_summ_delivered" - _unique_id_suffix = "tier1_summation_delivered" - _attr_translation_key: str = "tier1_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
Tier2SmartEnergySummation(PolledSmartEnergySummation): - """Tier 2 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier2_summ_delivered" - _unique_id_suffix = "tier2_summation_delivered" - _attr_translation_key: str = "tier2_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier3SmartEnergySummation(PolledSmartEnergySummation): - """Tier 3 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier3_summ_delivered" - _unique_id_suffix = "tier3_summation_delivered" - _attr_translation_key: str = "tier3_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier4SmartEnergySummation(PolledSmartEnergySummation): - """Tier 4 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier4_summ_delivered" - _unique_id_suffix = "tier4_summation_delivered" - _attr_translation_key: str = "tier4_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier5SmartEnergySummation(PolledSmartEnergySummation): - """Tier 5 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier5_summ_delivered" - _unique_id_suffix = "tier5_summation_delivered" - _attr_translation_key: str = "tier5_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, - models={"ZLinky_TIC", "TICMeter"}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Tier6SmartEnergySummation(PolledSmartEnergySummation): - """Tier 6 Smart Energy Metering summation sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_tier6_summ_delivered" - _unique_id_suffix = "tier6_summation_delivered" - _attr_translation_key: str = "tier6_summation_delivered" - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_SMARTENERGY_METERING, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SmartEnergySummationReceived(PolledSmartEnergySummation): - """Smart Energy Metering summation received sensor.""" - - _use_custom_polling = False # Poll indirectly by PolledSmartEnergySummation - _attribute_name = "current_summ_received" - _unique_id_suffix = "summation_received" - _attr_translation_key: str = "summation_received" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - This attribute only started to be initialized in HA 2024.2.0, - so the entity would be created on the first HA start after the - upgrade for existing devices, as the initialization to see if - an attribute is unsupported happens later in the background. 
- To avoid creating unnecessary entities for existing devices, - wait until the attribute was properly initialized once for now. - """ - if cluster_handlers[0].cluster.get(cls._attribute_name) is None: - return None - return super().create_entity(unique_id, zha_device, cluster_handlers, **kwargs) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_PRESSURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Pressure(Sensor): - """Pressure sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.PRESSURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _attr_native_unit_of_measurement = UnitOfPressure.HPA - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_TEMPERATURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class Temperature(Sensor): - """Temperature Sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _divisor = 100 - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_DEVICE_TEMPERATURE) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DeviceTemperature(Sensor): - """Device Temperature Sensor.""" - - _attribute_name = "current_temperature" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.TEMPERATURE - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "device_temperature" - _divisor = 100 - _attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH(cluster_handler_names="carbon_dioxide_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class CarbonDioxideConcentration(Sensor): - """Carbon Dioxide Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO2 - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH(cluster_handler_names="carbon_monoxide_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class CarbonMonoxideConcentration(Sensor): - """Carbon Monoxide Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.CO - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH(generic_ids="cluster_handler_0x042e", stop_on_match_group="voc_level") -@MULTI_MATCH(cluster_handler_names="voc_level", stop_on_match_group="voc_level") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class VOCLevel(Sensor): - """VOC Level sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - - -@MULTI_MATCH( - cluster_handler_names="voc_level", - models="lumi.airmonitor.acn01", - stop_on_match_group="voc_level", -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class 
PPBVOCLevel(Sensor): - """VOC Level sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = ( - SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS - ) - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_BILLION - - -@MULTI_MATCH(cluster_handler_names="pm25") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PM25(Sensor): - """Particulate Matter 2.5 microns or less sensor.""" - - _attribute_name = "measured_value" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.PM25 - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _decimals = 0 - _multiplier = 1 - _attr_native_unit_of_measurement = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER - - -@MULTI_MATCH(cluster_handler_names="formaldehyde_concentration") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class FormaldehydeConcentration(Sensor): - """Formaldehyde Concentration sensor.""" - - _attribute_name = "measured_value" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_translation_key: str = "formaldehyde" - _decimals = 0 - _multiplier = 1e6 - _attr_native_unit_of_measurement = CONCENTRATION_PARTS_PER_MILLION - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class ThermostatHVACAction(Sensor): - """Thermostat HVAC action sensor.""" - - _unique_id_suffix = "hvac_action" - _attr_translation_key: str = "hvac_action" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) + return self.entity_data.entity.native_value @property - def native_value(self) -> str | None: - """Return the current HVAC action.""" - if ( - self._cluster_handler.pi_heating_demand is None - and self._cluster_handler.pi_cooling_demand is None - ): - return self._rm_rs_action - return self._pi_demand_action - - @property - def _rm_rs_action(self) -> HVACAction | None: - """Return the current HVAC action based on running mode and running state.""" - - if (running_state := self._cluster_handler.running_state) is None: + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return entity specific state attributes.""" + entity = self.entity_data.entity + if entity.extra_state_attribute_names is None: return None - rs_heat = ( - self._cluster_handler.RunningState.Heat_State_On - | self._cluster_handler.RunningState.Heat_2nd_Stage_On + if not entity.extra_state_attribute_names <= _EXTRA_STATE_ATTRIBUTES: + _LOGGER.warning( + "Unexpected extra state attributes found for sensor %s: %s", + entity, + entity.extra_state_attribute_names - _EXTRA_STATE_ATTRIBUTES, + ) + + return exclude_none_values( + { + name: entity.state.get(name) + for name in entity.extra_state_attribute_names + } ) - if running_state & rs_heat: - return HVACAction.HEATING - - rs_cool = ( - self._cluster_handler.RunningState.Cool_State_On - | self._cluster_handler.RunningState.Cool_2nd_Stage_On - ) - if running_state & rs_cool: - return HVACAction.COOLING - - running_state = self._cluster_handler.running_state - if running_state and running_state & ( - self._cluster_handler.RunningState.Fan_State_On - | self._cluster_handler.RunningState.Fan_2nd_Stage_On - | self._cluster_handler.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - - running_state = self._cluster_handler.running_state - if running_state and running_state & self._cluster_handler.RunningState.Idle: - return HVACAction.IDLE - - if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: - return HVACAction.IDLE - return HVACAction.OFF - - @property - def _pi_demand_action(self) -> HVACAction: - """Return the current HVAC action based on pi_demands.""" - - heating_demand = self._cluster_handler.pi_heating_demand - if heating_demand is not None and heating_demand > 0: - return HVACAction.HEATING - cooling_demand = self._cluster_handler.pi_cooling_demand - if cooling_demand is not None and cooling_demand > 0: - return HVACAction.COOLING - - if self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off: - return HVACAction.IDLE - return HVACAction.OFF - - -@MULTI_MATCH( - cluster_handler_names={CLUSTER_HANDLER_THERMOSTAT}, - manufacturers="Sinope Technologies", - stop_on_match_group=CLUSTER_HANDLER_THERMOSTAT, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SinopeHVACAction(ThermostatHVACAction): - """Sinope Thermostat HVAC action sensor.""" - - @property - def _rm_rs_action(self) -> HVACAction: - """Return the current HVAC action based on running mode and running state.""" - - running_mode = self._cluster_handler.running_mode - if running_mode == self._cluster_handler.RunningMode.Heat: - return HVACAction.HEATING - if running_mode == self._cluster_handler.RunningMode.Cool: - return HVACAction.COOLING - - running_state = self._cluster_handler.running_state - if running_state and running_state & ( - 
self._cluster_handler.RunningState.Fan_State_On - | self._cluster_handler.RunningState.Fan_2nd_Stage_On - | self._cluster_handler.RunningState.Fan_3rd_Stage_On - ): - return HVACAction.FAN - if ( - self._cluster_handler.system_mode != self._cluster_handler.SystemMode.Off - and running_mode == self._cluster_handler.SystemMode.Off - ): - return HVACAction.IDLE - return HVACAction.OFF - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class RSSISensor(Sensor): - """RSSI sensor for a device.""" - - _attribute_name = "rssi" - _unique_id_suffix = "rssi" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_device_class: SensorDeviceClass | None = SensorDeviceClass.SIGNAL_STRENGTH - _attr_native_unit_of_measurement: str | None = SIGNAL_STRENGTH_DECIBELS_MILLIWATT - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_entity_registry_enabled_default = False - _attr_should_poll = True # BaseZhaEntity defaults to False - _attr_translation_key: str = "rssi" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - key = f"{CLUSTER_HANDLER_BASIC}_{cls._unique_id_suffix}" - if ZHA_ENTITIES.prevent_entity_creation(Platform.SENSOR, zha_device.ieee, key): - return None - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @property - def native_value(self) -> StateType: - """Return the state of the entity.""" - return getattr(self._zha_device.device, self._attribute_name) - - -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_BASIC) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class LQISensor(RSSISensor): - """LQI sensor for a device.""" - - _attribute_name = "lqi" - _unique_id_suffix = "lqi" - _attr_device_class = None - _attr_native_unit_of_measurement = None - _attr_translation_key = "lqi" - - -@MULTI_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_htnnfasr", - }, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class TimeLeft(Sensor): - """Sensor that displays time left value.""" - - _attribute_name = "timer_time_left" - _unique_id_suffix = "time_left" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - _attr_translation_key: str = "timer_time_left" - _attr_native_unit_of_measurement = UnitOfTime.MINUTES - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class IkeaDeviceRunTime(Sensor): - """Sensor that displays device run time (in minutes).""" - - _attribute_name = "device_run_time" - _unique_id_suffix = "device_run_time" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - _attr_translation_key: str = "device_run_time" - _attr_native_unit_of_measurement = UnitOfTime.MINUTES - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - - -@MULTI_MATCH(cluster_handler_names="ikea_airpurifier") -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class IkeaFilterRunTime(Sensor): - """Sensor that displays run time of the current filter (in minutes).""" - - _attribute_name = "filter_run_time" - _unique_id_suffix = "filter_run_time" - _attr_device_class: SensorDeviceClass = SensorDeviceClass.DURATION - _attr_translation_key: str = "filter_run_time" - 
_attr_native_unit_of_measurement = UnitOfTime.MINUTES - _attr_entity_category: EntityCategory = EntityCategory.DIAGNOSTIC - - -class AqaraFeedingSource(types.enum8): - """Aqara pet feeder feeding source.""" - - Feeder = 0x01 - HomeAssistant = 0x02 - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederLastFeedingSource(EnumSensor): - """Sensor that displays the last feeding source of pet feeder.""" - - _attribute_name = "last_feeding_source" - _unique_id_suffix = "last_feeding_source" - _attr_translation_key: str = "last_feeding_source" - _enum = AqaraFeedingSource - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederLastFeedingSize(Sensor): - """Sensor that displays the last feeding size of the pet feeder.""" - - _attribute_name = "last_feeding_size" - _unique_id_suffix = "last_feeding_size" - _attr_translation_key: str = "last_feeding_size" - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederPortionsDispensed(Sensor): - """Sensor that displays the number of portions dispensed by the pet feeder.""" - - _attribute_name = "portions_dispensed" - _unique_id_suffix = "portions_dispensed" - _attr_translation_key: str = "portions_dispensed_today" - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraPetFeederWeightDispensed(Sensor): - """Sensor that displays the weight dispensed by the pet feeder.""" - - _attribute_name = "weight_dispensed" - _unique_id_suffix = "weight_dispensed" - _attr_translation_key: str = "weight_dispensed_today" - _attr_native_unit_of_measurement = UnitOfMass.GRAMS - _attr_state_class: SensorStateClass = SensorStateClass.TOTAL_INCREASING - - -@MULTI_MATCH(cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraSmokeDensityDbm(Sensor): - """Sensor that displays the smoke density of an Aqara smoke sensor in dB/m.""" - - _attribute_name = "smoke_density_dbm" - _unique_id_suffix = "smoke_density_dbm" - _attr_translation_key: str = "smoke_density" - _attr_native_unit_of_measurement = "dB/m" - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_suggested_display_precision: int = 3 - - -class SonoffIlluminationStates(types.enum8): - """Enum for displaying last Illumination state.""" - - Dark = 0x00 - Light = 0x01 - - -@MULTI_MATCH(cluster_handler_names="sonoff_manufacturer", models={"SNZB-06P"}) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SonoffPresenceSenorIlluminationStatus(EnumSensor): - """Sensor that displays the illumination status the last time peresence was detected.""" - - _attribute_name = "last_illumination_state" - _unique_id_suffix = "last_illumination" - _attr_translation_key: str = "last_illumination_state" - _enum = SonoffIlluminationStates - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class PiHeatingDemand(Sensor): - """Sensor that displays the percentage of heating power demanded. 
- - Optional thermostat attribute. - """ - - _unique_id_suffix = "pi_heating_demand" - _attribute_name = "pi_heating_demand" - _attr_translation_key: str = "pi_heating_demand" - _attr_native_unit_of_measurement = PERCENTAGE - _decimals = 0 - _attr_state_class: SensorStateClass = SensorStateClass.MEASUREMENT - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -class SetpointChangeSourceEnum(types.enum8): - """The source of the setpoint change.""" - - Manual = 0x00 - Schedule = 0x01 - External = 0x02 - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class SetpointChangeSource(EnumSensor): - """Sensor that displays the source of the setpoint change. - - Optional thermostat attribute. - """ - - _unique_id_suffix = "setpoint_change_source" - _attribute_name = "setpoint_change_source" - _attr_translation_key: str = "setpoint_change_source" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _enum = SetpointChangeSourceEnum - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class WindowCoveringTypeSensor(EnumSensor): - """Sensor that displays the type of a cover device.""" - - _attribute_name: str = WindowCovering.AttributeDefs.window_covering_type.name - _enum = WindowCovering.WindowCoveringType - _unique_id_suffix: str = WindowCovering.AttributeDefs.window_covering_type.name - _attr_translation_key: str = WindowCovering.AttributeDefs.window_covering_type.name - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_icon = "mdi:curtains" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, models={"lumi.curtain.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraCurtainMotorPowerSourceSensor(EnumSensor): - """Sensor that displays the power source of the Aqara E1 curtain motor device.""" - - _attribute_name: str = Basic.AttributeDefs.power_source.name - _enum = Basic.PowerSource - _unique_id_suffix: str = Basic.AttributeDefs.power_source.name - _attr_translation_key: str = Basic.AttributeDefs.power_source.name - _attr_entity_category = EntityCategory.DIAGNOSTIC - _attr_icon = "mdi:battery-positive" - - -class AqaraE1HookState(types.enum8): - """Aqara hook state.""" - - Unlocked = 0x00 - Locked = 0x01 - Locking = 0x02 - Unlocking = 0x03 - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class AqaraCurtainHookStateSensor(EnumSensor): - """Representation of a ZHA curtain mode configuration entity.""" - - _attribute_name = "hooks_state" - _enum = AqaraE1HookState - _unique_id_suffix = "hooks_state" - _attr_translation_key: str = "hooks_state" - _attr_icon: str = "mdi:hook" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class BitMapSensor(Sensor): - """A sensor with only state attributes. - - The sensor value will be an aggregate of the state attributes. 
- """ - - _bitmap: types.bitmap8 | types.bitmap16 - - def formatter(self, _value: int) -> str: - """Summary of all attributes.""" - binary_state_attributes = [ - key for (key, elem) in self.extra_state_attributes.items() if elem - ] - - return "something" if binary_state_attributes else "nothing" - - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Bitmap.""" - value = self._cluster_handler.cluster.get(self._attribute_name) - - state_attr = {} - - for bit in list(self._bitmap): - if value is None: - state_attr[bit.name] = False - else: - state_attr[bit.name] = bit in self._bitmap(value) - - return state_attr - - -@MULTI_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossOpenWindowDetection(EnumSensor): - """Danfoss proprietary attribute. - - Sensor that displays whether the TRV detects an open window using the temperature sensor. - """ - - _unique_id_suffix = "open_window_detection" - _attribute_name = "open_window_detection" - _attr_translation_key: str = "open_window_detected" - _attr_icon: str = "mdi:window-open" - _enum = danfoss_thermostat.DanfossOpenWindowDetectionEnum - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossLoadEstimate(Sensor): - """Danfoss proprietary attribute for communicating its estimate of the radiator load.""" - - _unique_id_suffix = "load_estimate" - _attribute_name = "load_estimate" - _attr_translation_key: str = "load_estimate" - _attr_icon: str = "mdi:scale-balance" - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossAdaptationRunStatus(BitMapSensor): - """Danfoss proprietary attribute for showing the status of the adaptation run.""" - - _unique_id_suffix = "adaptation_run_status" - _attribute_name = "adaptation_run_status" - _attr_translation_key: str = "adaptation_run_status" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _bitmap = danfoss_thermostat.DanfossAdaptationRunStatusBitmap - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossPreheatTime(Sensor): - """Danfoss proprietary attribute for communicating the time when it starts pre-heating.""" - - _unique_id_suffix = "preheat_time" - _attribute_name = "preheat_time" - _attr_translation_key: str = "preheat_time" - _attr_icon: str = "mdi:radiator" - _attr_entity_registry_enabled_default = False - _attr_entity_category = EntityCategory.DIAGNOSTIC - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="diagnostic", - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossSoftwareErrorCode(BitMapSensor): - """Danfoss proprietary attribute for communicating the error code.""" - - _unique_id_suffix = "sw_error_code" - _attribute_name = "sw_error_code" - _attr_translation_key: str = "software_error" - _attr_entity_category = EntityCategory.DIAGNOSTIC - _bitmap = danfoss_thermostat.DanfossSoftwareErrorCodeBitmap - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="diagnostic", - 
quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -# pylint: disable-next=hass-invalid-inheritance # needs fixing -class DanfossMotorStepCounter(Sensor): - """Danfoss proprietary attribute for communicating the motor step counter.""" - - _unique_id_suffix = "motor_step_counter" - _attribute_name = "motor_step_counter" - _attr_translation_key: str = "motor_stepcount" - _attr_entity_category = EntityCategory.DIAGNOSTIC diff --git a/homeassistant/components/zha/siren.py b/homeassistant/components/zha/siren.py index 3aab332f746..9d876d9ca4d 100644 --- a/homeassistant/components/zha/siren.py +++ b/homeassistant/components/zha/siren.py @@ -2,11 +2,18 @@ from __future__ import annotations -from collections.abc import Callable import functools -from typing import TYPE_CHECKING, Any, cast +from typing import Any -from zigpy.zcl.clusters.security import IasWd as WD +from zha.application.const import ( + WARNING_DEVICE_MODE_BURGLAR, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_MODE_FIRE, + WARNING_DEVICE_MODE_FIRE_PANIC, + WARNING_DEVICE_MODE_POLICE_PANIC, +) +from zha.application.platforms.siren import SirenEntityFeature as ZHASirenEntityFeature from homeassistant.components.siren import ( ATTR_DURATION, @@ -17,38 +24,18 @@ from homeassistant.components.siren import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later -from .core import discovery -from .core.cluster_handlers.security import IasWdClusterHandler -from .core.const import ( - CLUSTER_HANDLER_IAS_WD, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - WARNING_DEVICE_MODE_BURGLAR, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_MODE_FIRE, - WARNING_DEVICE_MODE_FIRE_PANIC, - WARNING_DEVICE_MODE_POLICE_PANIC, - WARNING_DEVICE_MODE_STOP, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_NO, - Strobe, + EntityData, + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -MULTI_MATCH = functools.partial(ZHA_ENTITIES.multipass_match, Platform.SIREN) -DEFAULT_DURATION = 5 # seconds async def async_setup_entry( @@ -64,115 +51,61 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, - async_add_entities, - entities_to_create, + zha_async_add_entities, async_add_entities, ZHASiren, entities_to_create ), ) config_entry.async_on_unload(unsub) -@MULTI_MATCH(cluster_handler_names=CLUSTER_HANDLER_IAS_WD) -class ZHASiren(ZhaEntity, SirenEntity): +class ZHASiren(ZHAEntity, SirenEntity): """Representation of a ZHA siren.""" - _attr_name: str = "Siren" + _attr_available_tones: list[int | str] | dict[int, str] | None = { + WARNING_DEVICE_MODE_BURGLAR: "Burglar", + WARNING_DEVICE_MODE_FIRE: "Fire", + WARNING_DEVICE_MODE_EMERGENCY: "Emergency", + WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", + WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", + 
WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", + } - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs, - ) -> None: - """Init this siren.""" - self._attr_supported_features = ( - SirenEntityFeature.TURN_ON - | SirenEntityFeature.TURN_OFF - | SirenEntityFeature.DURATION - | SirenEntityFeature.VOLUME_SET - | SirenEntityFeature.TONES - ) - self._attr_available_tones: list[int | str] | dict[int, str] | None = { - WARNING_DEVICE_MODE_BURGLAR: "Burglar", - WARNING_DEVICE_MODE_FIRE: "Fire", - WARNING_DEVICE_MODE_EMERGENCY: "Emergency", - WARNING_DEVICE_MODE_POLICE_PANIC: "Police Panic", - WARNING_DEVICE_MODE_FIRE_PANIC: "Fire Panic", - WARNING_DEVICE_MODE_EMERGENCY_PANIC: "Emergency Panic", - } - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._cluster_handler: IasWdClusterHandler = cast( - IasWdClusterHandler, cluster_handlers[0] - ) - self._attr_is_on: bool = False - self._off_listener: Callable[[], None] | None = None + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA siren.""" + super().__init__(entity_data, **kwargs) + features: SirenEntityFeature = SirenEntityFeature(0) + zha_features: ZHASirenEntityFeature = self.entity_data.entity.supported_features + + if ZHASirenEntityFeature.TURN_ON in zha_features: + features |= SirenEntityFeature.TURN_ON + if ZHASirenEntityFeature.TURN_OFF in zha_features: + features |= SirenEntityFeature.TURN_OFF + if ZHASirenEntityFeature.TONES in zha_features: + features |= SirenEntityFeature.TONES + if ZHASirenEntityFeature.VOLUME_SET in zha_features: + features |= SirenEntityFeature.VOLUME_SET + if ZHASirenEntityFeature.DURATION in zha_features: + features |= SirenEntityFeature.DURATION + + self._attr_supported_features = features + + @property + def is_on(self) -> bool: + """Return True if entity is on.""" + return self.entity_data.entity.is_on + + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn on siren.""" - if self._off_listener: - self._off_listener() - self._off_listener = None - tone_cache = self._cluster_handler.data_cache.get( - WD.Warning.WarningMode.__name__ - ) - siren_tone = ( - tone_cache.value - if tone_cache is not None - else WARNING_DEVICE_MODE_EMERGENCY - ) - siren_duration = DEFAULT_DURATION - level_cache = self._cluster_handler.data_cache.get( - WD.Warning.SirenLevel.__name__ - ) - siren_level = ( - level_cache.value if level_cache is not None else WARNING_DEVICE_SOUND_HIGH - ) - strobe_cache = self._cluster_handler.data_cache.get(Strobe.__name__) - should_strobe = ( - strobe_cache.value if strobe_cache is not None else Strobe.No_Strobe - ) - strobe_level_cache = self._cluster_handler.data_cache.get( - WD.StrobeLevel.__name__ - ) - strobe_level = ( - strobe_level_cache.value - if strobe_level_cache is not None - else WARNING_DEVICE_STROBE_HIGH - ) - if (duration := kwargs.get(ATTR_DURATION)) is not None: - siren_duration = duration - if (tone := kwargs.get(ATTR_TONE)) is not None: - siren_tone = tone - if (level := kwargs.get(ATTR_VOLUME_LEVEL)) is not None: - siren_level = int(level) - await self._cluster_handler.issue_start_warning( - mode=siren_tone, - warning_duration=siren_duration, - siren_level=siren_level, - strobe=should_strobe, - strobe_duty_cycle=50 if should_strobe else 0, - strobe_intensity=strobe_level, - ) - self._attr_is_on = True - self._off_listener = async_call_later( - self._zha_device.hass, siren_duration, self.async_set_off + 
await self.entity_data.entity.async_turn_on( + duration=kwargs.get(ATTR_DURATION), + tone=kwargs.get(ATTR_TONE), + volume_level=kwargs.get(ATTR_VOLUME_LEVEL), ) self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn off siren.""" - await self._cluster_handler.issue_start_warning( - mode=WARNING_DEVICE_MODE_STOP, strobe=WARNING_DEVICE_STROBE_NO - ) - self._attr_is_on = False - self.async_write_ha_state() - - @callback - def async_set_off(self, _) -> None: - """Set is_on to False and write HA state.""" - self._attr_is_on = False - if self._off_listener: - self._off_listener() - self._off_listener = None + await self.entity_data.entity.async_turn_off() self.async_write_ha_state() diff --git a/homeassistant/components/zha/strings.json b/homeassistant/components/zha/strings.json index f25fdf1ebe4..5d81556564a 100644 --- a/homeassistant/components/zha/strings.json +++ b/homeassistant/components/zha/strings.json @@ -413,7 +413,7 @@ }, "warning_device_squawk": { "name": "Warning device squawk", - "description": "This service uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", + "description": "This action uses the WD capabilities to emit a quick audible/visible pulse called a \"squawk\". The squawk command has no effect if the WD is currently active (warning in progress).", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", @@ -435,7 +435,7 @@ }, "warning_device_warn": { "name": "Warning device starts alert", - "description": "This service starts the operation of the warning device. The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", + "description": "This action starts the operation of the warning device. 
The warning device alerts the surrounding area by audible (siren) and visual (strobe) signals.", "fields": { "ieee": { "name": "[%key:component::zha::services::permit::fields::ieee::name%]", diff --git a/homeassistant/components/zha/switch.py b/homeassistant/components/zha/switch.py index f07d3d4c8e3..cb0268f98e0 100644 --- a/homeassistant/components/zha/switch.py +++ b/homeassistant/components/zha/switch.py @@ -4,44 +4,21 @@ from __future__ import annotations import functools import logging -from typing import TYPE_CHECKING, Any, Self - -from zhaquirks.quirk_ids import DANFOSS_ALLY_THERMOSTAT, TUYA_PLUG_ONOFF -from zigpy.quirks.v2 import SwitchMetadata -from zigpy.zcl.clusters.closures import ConfigStatus, WindowCovering, WindowCoveringMode -from zigpy.zcl.clusters.general import OnOff -from zigpy.zcl.foundation import Status +from typing import Any from homeassistant.components.switch import SwitchEntity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, EntityCategory, Platform -from homeassistant.core import HomeAssistant, State, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .core import discovery -from .core.const import ( - CLUSTER_HANDLER_BASIC, - CLUSTER_HANDLER_COVER, - CLUSTER_HANDLER_INOVELLI, - CLUSTER_HANDLER_ON_OFF, - CLUSTER_HANDLER_THERMOSTAT, - ENTITY_METADATA, +from .entity import ZHAEntity +from .helpers import ( SIGNAL_ADD_ENTITIES, - SIGNAL_ATTR_UPDATED, -) -from .core.helpers import get_zha_data -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity, ZhaGroupEntity - -if TYPE_CHECKING: - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice - -STRICT_MATCH = functools.partial(ZHA_ENTITIES.strict_match, Platform.SWITCH) -GROUP_MATCH = functools.partial(ZHA_ENTITIES.group_match, Platform.SWITCH) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.SWITCH + async_add_entities as zha_async_add_entities, + convert_zha_error_to_ha_error, + get_zha_data, ) _LOGGER = logging.getLogger(__name__) @@ -60,752 +37,28 @@ async def async_setup_entry( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, async_add_entities, entities_to_create + zha_async_add_entities, async_add_entities, Switch, entities_to_create ), ) config_entry.async_on_unload(unsub) -@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ON_OFF) -class Switch(ZhaEntity, SwitchEntity): +class Switch(ZHAEntity, SwitchEntity): """ZHA switch.""" - _attr_translation_key = "switch" - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Initialize the ZHA switch.""" - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - self._on_off_cluster_handler = self.cluster_handlers[CLUSTER_HANDLER_ON_OFF] - @property def is_on(self) -> bool: """Return if the switch is on based on the statemachine.""" - if self._on_off_cluster_handler.on_off is None: - return False - return self._on_off_cluster_handler.on_off + return self.entity_data.entity.is_on + @convert_zha_error_to_ha_error async def async_turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - await self._on_off_cluster_handler.turn_on() + await self.entity_data.entity.async_turn_on() 
self.async_write_ha_state() + @convert_zha_error_to_ha_error async def async_turn_off(self, **kwargs: Any) -> None: """Turn the entity off.""" - await self._on_off_cluster_handler.turn_off() + await self.entity_data.entity.async_turn_off() self.async_write_ha_state() - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._on_off_cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - async def async_update(self) -> None: - """Attempt to retrieve on off state from the switch.""" - self.debug("Polling current state") - await self._on_off_cluster_handler.get_attribute_value( - "on_off", from_cache=False - ) - - -@GROUP_MATCH() -class SwitchGroup(ZhaGroupEntity, SwitchEntity): - """Representation of a switch group.""" - - def __init__( - self, - entity_ids: list[str], - unique_id: str, - group_id: int, - zha_device: ZHADevice, - **kwargs: Any, - ) -> None: - """Initialize a switch group.""" - super().__init__(entity_ids, unique_id, group_id, zha_device, **kwargs) - self._available: bool - self._state: bool - group = self.zha_device.gateway.get_group(self._group_id) - self._on_off_cluster_handler = group.endpoint[OnOff.cluster_id] - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - return bool(self._state) - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - result = await self._on_off_cluster_handler.on() - if result[1] is not Status.SUCCESS: - return - self._state = True - self.async_write_ha_state() - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - result = await self._on_off_cluster_handler.off() - if result[1] is not Status.SUCCESS: - return - self._state = False - self.async_write_ha_state() - - async def async_update(self) -> None: - """Query all members and determine the switch group state.""" - all_states = [self.hass.states.get(x) for x in self._entity_ids] - states: list[State] = list(filter(None, all_states)) - on_states = [state for state in states if state.state == STATE_ON] - - self._state = len(on_states) > 0 - self._available = any(state.state != STATE_UNAVAILABLE for state in states) - - -class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity): - """Representation of a ZHA switch configuration entity.""" - - _attr_entity_category = EntityCategory.CONFIG - _attribute_name: str - _inverter_attribute_name: str | None = None - _force_inverted: bool = False - _off_value: int = 0 - _on_value: int = 1 - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. 
- - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - if ENTITY_METADATA not in kwargs and ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> None: - """Init this number configuration entity.""" - self._cluster_handler: ClusterHandler = cluster_handlers[0] - if ENTITY_METADATA in kwargs: - self._init_from_quirks_metadata(kwargs[ENTITY_METADATA]) - super().__init__(unique_id, zha_device, cluster_handlers, **kwargs) - - def _init_from_quirks_metadata(self, entity_metadata: SwitchMetadata) -> None: - """Init this entity from the quirks metadata.""" - super()._init_from_quirks_metadata(entity_metadata) - self._attribute_name = entity_metadata.attribute_name - if entity_metadata.invert_attribute_name: - self._inverter_attribute_name = entity_metadata.invert_attribute_name - if entity_metadata.force_inverted: - self._force_inverted = entity_metadata.force_inverted - self._off_value = entity_metadata.off_value - self._on_value = entity_metadata.on_value - - async def async_added_to_hass(self) -> None: - """Run when about to be added to hass.""" - await super().async_added_to_hass() - self.async_accept_signal( - self._cluster_handler, SIGNAL_ATTR_UPDATED, self.async_set_state - ) - - @callback - def async_set_state(self, attr_id: int, attr_name: str, value: Any): - """Handle state update from cluster handler.""" - self.async_write_ha_state() - - @property - def inverted(self) -> bool: - """Return True if the switch is inverted.""" - if self._inverter_attribute_name: - return bool( - self._cluster_handler.cluster.get(self._inverter_attribute_name) - ) - return self._force_inverted - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - if self._on_value != 1: - val = self._cluster_handler.cluster.get(self._attribute_name) - val = val == self._on_value - else: - val = bool(self._cluster_handler.cluster.get(self._attribute_name)) - return (not val) if self.inverted else val - - async def async_turn_on_off(self, state: bool) -> None: - """Turn the entity on or off.""" - if self.inverted: - state = not state - if state: - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._on_value} - ) - else: - await self._cluster_handler.write_attributes_safe( - {self._attribute_name: self._off_value} - ) - self.async_write_ha_state() - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - await self.async_turn_on_off(True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self.async_turn_on_off(False) - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - self.debug("Polling current state") - value = await self._cluster_handler.get_attribute_value( - self._attribute_name, from_cache=False - ) - await self._cluster_handler.get_attribute_value( - self._inverter_attribute_name, from_cache=False - ) - self.debug("read value=%s, inverted=%s", value, self.inverted) - - 
-@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="tuya_manufacturer", - manufacturers={ - "_TZE200_b6wax7g0", - }, -) -class OnOffWindowDetectionFunctionConfigurationEntity(ZHASwitchConfigurationEntity): - """Representation of a ZHA window detection configuration entity.""" - - _unique_id_suffix = "on_off_window_opened_detection" - _attribute_name = "window_detection_function" - _inverter_attribute_name = "window_detection_function_inverter" - _attr_translation_key = "window_detection_function" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.motion.ac02"} -) -class P1MotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA motion triggering configuration entity.""" - - _unique_id_suffix = "trigger_indicator" - _attribute_name = "trigger_indicator" - _attr_translation_key = "trigger_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", - models={"lumi.plug.mmeu01", "lumi.plug.maeu01"}, -) -class XiaomiPlugPowerOutageMemorySwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA power outage memory configuration entity.""" - - _unique_id_suffix = "power_outage_memory" - _attribute_name = "power_outage_memory" - _attr_translation_key = "power_outage_memory" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_BASIC, - manufacturers={"Philips", "Signify Netherlands B.V."}, - models={"SML001", "SML002", "SML003", "SML004"}, -) -class HueMotionTriggerIndicatorSwitch(ZHASwitchConfigurationEntity): - """Representation of a ZHA motion triggering configuration entity.""" - - _unique_id_suffix = "trigger_indicator" - _attribute_name = "trigger_indicator" - _attr_translation_key = "trigger_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class ChildLock(ZHASwitchConfigurationEntity): - """ZHA BinarySensor.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="ikea_airpurifier", - models={"STARKVIND Air purifier", "STARKVIND Air purifier table"}, -) -class DisableLed(ZHASwitchConfigurationEntity): - """ZHA BinarySensor.""" - - _unique_id_suffix = "disable_led" - _attribute_name = "disable_led" - _attr_translation_key = "disable_led" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliInvertSwitch(ZHASwitchConfigurationEntity): - """Inovelli invert switch control.""" - - _unique_id_suffix = "invert_switch" - _attribute_name = "invert_switch" - _attr_translation_key = "invert_switch" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliSmartBulbMode(ZHASwitchConfigurationEntity): - """Inovelli smart bulb mode control.""" - - _unique_id_suffix = "smart_bulb_mode" - _attribute_name = "smart_bulb_mode" - _attr_translation_key = "smart_bulb_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, models={"VZM35-SN"} -) -class InovelliSmartFanMode(ZHASwitchConfigurationEntity): - """Inovelli smart fan mode control.""" - - _unique_id_suffix = "smart_fan_mode" - _attribute_name = "smart_fan_mode" - _attr_translation_key = "smart_fan_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDoubleTapUpEnabled(ZHASwitchConfigurationEntity): - """Inovelli double tap up enabled.""" - - 
_unique_id_suffix = "double_tap_up_enabled" - _attribute_name = "double_tap_up_enabled" - _attr_translation_key = "double_tap_up_enabled" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDoubleTapDownEnabled(ZHASwitchConfigurationEntity): - """Inovelli double tap down enabled.""" - - _unique_id_suffix = "double_tap_down_enabled" - _attribute_name = "double_tap_down_enabled" - _attr_translation_key = "double_tap_down_enabled" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliAuxSwitchScenes(ZHASwitchConfigurationEntity): - """Inovelli unique aux switch scenes.""" - - _unique_id_suffix = "aux_switch_scenes" - _attribute_name = "aux_switch_scenes" - _attr_translation_key = "aux_switch_scenes" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliBindingOffToOnSyncLevel(ZHASwitchConfigurationEntity): - """Inovelli send move to level with on/off to bound devices.""" - - _unique_id_suffix = "binding_off_to_on_sync_level" - _attribute_name = "binding_off_to_on_sync_level" - _attr_translation_key = "binding_off_to_on_sync_level" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliLocalProtection(ZHASwitchConfigurationEntity): - """Inovelli local protection control.""" - - _unique_id_suffix = "local_protection" - _attribute_name = "local_protection" - _attr_translation_key = "local_protection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliOnOffLEDMode(ZHASwitchConfigurationEntity): - """Inovelli only 1 LED mode control.""" - - _unique_id_suffix = "on_off_led_mode" - _attribute_name = "on_off_led_mode" - _attr_translation_key = "one_led_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliFirmwareProgressLED(ZHASwitchConfigurationEntity): - """Inovelli firmware progress LED control.""" - - _unique_id_suffix = "firmware_progress_led" - _attribute_name = "firmware_progress_led" - _attr_translation_key = "firmware_progress_led" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliRelayClickInOnOffMode(ZHASwitchConfigurationEntity): - """Inovelli relay click in on off mode control.""" - - _unique_id_suffix = "relay_click_in_on_off_mode" - _attribute_name = "relay_click_in_on_off_mode" - _attr_translation_key = "relay_click_in_on_off_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_INOVELLI, -) -class InovelliDisableDoubleTapClearNotificationsMode(ZHASwitchConfigurationEntity): - """Inovelli disable clear notifications double tap control.""" - - _unique_id_suffix = "disable_clear_notifications_double_tap" - _attribute_name = "disable_clear_notifications_double_tap" - _attr_translation_key = "disable_clear_notifications_double_tap" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederLEDIndicator(ZHASwitchConfigurationEntity): - """Representation of a LED indicator configuration entity.""" - - _unique_id_suffix = "disable_led_indicator" - _attribute_name = "disable_led_indicator" - _attr_translation_key = "led_indicator" - _force_inverted = True - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"aqara.feeder.acn001"} -) -class AqaraPetFeederChildLock(ZHASwitchConfigurationEntity): - """Representation of a child lock configuration entity.""" 
- - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_ON_OFF, quirk_ids=TUYA_PLUG_ONOFF -) -class TuyaChildLockSwitch(ZHASwitchConfigurationEntity): - """Representation of a child lock configuration entity.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatWindowDetection(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat window detection configuration entity.""" - - _unique_id_suffix = "window_detection" - _attribute_name = "window_detection" - _attr_translation_key = "window_detection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatValveDetection(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat valve detection configuration entity.""" - - _unique_id_suffix = "valve_detection" - _attribute_name = "valve_detection" - _attr_translation_key = "valve_detection" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.airrtc.agl001"} -) -class AqaraThermostatChildLock(ZHASwitchConfigurationEntity): - """Representation of an Aqara thermostat child lock configuration entity.""" - - _unique_id_suffix = "child_lock" - _attribute_name = "child_lock" - _attr_translation_key = "child_lock" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraHeartbeatIndicator(ZHASwitchConfigurationEntity): - """Representation of a heartbeat indicator configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "heartbeat_indicator" - _attribute_name = "heartbeat_indicator" - _attr_translation_key = "heartbeat_indicator" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraLinkageAlarm(ZHASwitchConfigurationEntity): - """Representation of a linkage alarm configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "linkage_alarm" - _attribute_name = "linkage_alarm" - _attr_translation_key = "linkage_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraBuzzerManualMute(ZHASwitchConfigurationEntity): - """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "buzzer_manual_mute" - _attribute_name = "buzzer_manual_mute" - _attr_translation_key = "buzzer_manual_mute" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.sensor_smoke.acn03"} -) -class AqaraBuzzerManualAlarm(ZHASwitchConfigurationEntity): - """Representation of a buzzer manual mute configuration entity for Aqara smoke sensors.""" - - _unique_id_suffix = "buzzer_manual_alarm" - _attribute_name = "buzzer_manual_alarm" - _attr_translation_key = "buzzer_manual_alarm" - - -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_COVER) -class WindowCoveringInversionSwitch(ZHASwitchConfigurationEntity): - """Representation of a switch that controls inversion for window covering devices. - - This is necessary because this cluster uses 2 attributes to control inversion. 
- """ - - _unique_id_suffix = "inverted" - _attribute_name = WindowCovering.AttributeDefs.config_status.name - _attr_translation_key = "inverted" - - @classmethod - def create_entity( - cls, - unique_id: str, - zha_device: ZHADevice, - cluster_handlers: list[ClusterHandler], - **kwargs: Any, - ) -> Self | None: - """Entity Factory. - - Return entity if it is a supported configuration, otherwise return None - """ - cluster_handler = cluster_handlers[0] - window_covering_mode_attr = ( - WindowCovering.AttributeDefs.window_covering_mode.name - ) - # this entity needs 2 attributes to function - if ( - cls._attribute_name in cluster_handler.cluster.unsupported_attributes - or cls._attribute_name not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(cls._attribute_name) is None - or window_covering_mode_attr - in cluster_handler.cluster.unsupported_attributes - or window_covering_mode_attr - not in cluster_handler.cluster.attributes_by_name - or cluster_handler.cluster.get(window_covering_mode_attr) is None - ): - _LOGGER.debug( - "%s is not supported - skipping %s entity creation", - cls._attribute_name, - cls.__name__, - ) - return None - - return cls(unique_id, zha_device, cluster_handlers, **kwargs) - - @property - def is_on(self) -> bool: - """Return if the switch is on based on the statemachine.""" - config_status = ConfigStatus( - self._cluster_handler.cluster.get(self._attribute_name) - ) - return ConfigStatus.Open_up_commands_reversed in config_status - - async def async_turn_on(self, **kwargs: Any) -> None: - """Turn the entity on.""" - await self._async_on_off(True) - - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the entity off.""" - await self._async_on_off(False) - - async def async_update(self) -> None: - """Attempt to retrieve the state of the entity.""" - self.debug("Polling current state") - await self._cluster_handler.get_attributes( - [ - self._attribute_name, - WindowCovering.AttributeDefs.window_covering_mode.name, - ], - from_cache=False, - only_cache=False, - ) - self.async_write_ha_state() - - async def _async_on_off(self, invert: bool) -> None: - """Turn the entity on or off.""" - name: str = WindowCovering.AttributeDefs.window_covering_mode.name - current_mode: WindowCoveringMode = WindowCoveringMode( - self._cluster_handler.cluster.get(name) - ) - send_command: bool = False - if invert and WindowCoveringMode.Motor_direction_reversed not in current_mode: - current_mode |= WindowCoveringMode.Motor_direction_reversed - send_command = True - elif not invert and WindowCoveringMode.Motor_direction_reversed in current_mode: - current_mode &= ~WindowCoveringMode.Motor_direction_reversed - send_command = True - if send_command: - await self._cluster_handler.write_attributes_safe({name: current_mode}) - await self.async_update() - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names="opple_cluster", models={"lumi.curtain.agl001"} -) -class AqaraE1CurtainMotorHooksLockedSwitch(ZHASwitchConfigurationEntity): - """Representation of a switch that controls whether the curtain motor hooks are locked.""" - - _unique_id_suffix = "hooks_lock" - _attribute_name = "hooks_lock" - _attr_translation_key = "hooks_locked" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossExternalOpenWindowDetected(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating an open window.""" - - _unique_id_suffix = "external_open_window_detected" - 
_attribute_name: str = "external_open_window_detected" - _attr_translation_key: str = "external_window_sensor" - _attr_icon: str = "mdi:window-open" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossWindowOpenFeature(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute enabling open window detection.""" - - _unique_id_suffix = "window_open_feature" - _attribute_name: str = "window_open_feature" - _attr_translation_key: str = "use_internal_window_detection" - _attr_icon: str = "mdi:window-open" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossMountingModeControl(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for switching to mounting mode.""" - - _unique_id_suffix = "mounting_mode_control" - _attribute_name: str = "mounting_mode_control" - _attr_translation_key: str = "mounting_mode" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossRadiatorCovered(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating full usage of the external temperature sensor.""" - - _unique_id_suffix = "radiator_covered" - _attribute_name: str = "radiator_covered" - _attr_translation_key: str = "prioritize_external_temperature_sensor" - _attr_icon: str = "mdi:thermometer" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossHeatAvailable(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for communicating available heat.""" - - _unique_id_suffix = "heat_available" - _attribute_name: str = "heat_available" - _attr_translation_key: str = "heat_available" - _attr_icon: str = "mdi:water-boiler" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossLoadBalancingEnable(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for enabling load balancing.""" - - _unique_id_suffix = "load_balancing_enable" - _attribute_name: str = "load_balancing_enable" - _attr_translation_key: str = "use_load_balancing" - _attr_icon: str = "mdi:scale-balance" - - -@CONFIG_DIAGNOSTIC_MATCH( - cluster_handler_names=CLUSTER_HANDLER_THERMOSTAT, - quirk_ids={DANFOSS_ALLY_THERMOSTAT}, -) -class DanfossAdaptationRunSettings(ZHASwitchConfigurationEntity): - """Danfoss proprietary attribute for enabling daily adaptation run. - - Actually a bitmap, but only the first bit is used. 
- """ - - _unique_id_suffix = "adaptation_run_settings" - _attribute_name: str = "adaptation_run_settings" - _attr_translation_key: str = "adaptation_run_enabled" diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index 0cb80d13119..e12d048b190 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -5,11 +5,10 @@ from __future__ import annotations import functools import logging import math -from typing import TYPE_CHECKING, Any +from typing import Any -from zigpy.ota import OtaImageWithMetadata -from zigpy.zcl.clusters.general import Ota -from zigpy.zcl.foundation import Status +from zha.exceptions import ZHAException +from zigpy.application import ControllerApplication from homeassistant.components.update import ( UpdateDeviceClass, @@ -17,8 +16,8 @@ from homeassistant.components.update import ( UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import EntityCategory, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -27,24 +26,17 @@ from homeassistant.helpers.update_coordinator import ( DataUpdateCoordinator, ) -from .core import discovery -from .core.const import CLUSTER_HANDLER_OTA, SIGNAL_ADD_ENTITIES, SIGNAL_ATTR_UPDATED -from .core.helpers import get_zha_data, get_zha_gateway -from .core.registries import ZHA_ENTITIES -from .entity import ZhaEntity - -if TYPE_CHECKING: - from zigpy.application import ControllerApplication - - from .core.cluster_handlers import ClusterHandler - from .core.device import ZHADevice +from .entity import ZHAEntity +from .helpers import ( + SIGNAL_ADD_ENTITIES, + EntityData, + async_add_entities as zha_async_add_entities, + get_zha_data, + get_zha_gateway, +) _LOGGER = logging.getLogger(__name__) -CONFIG_DIAGNOSTIC_MATCH = functools.partial( - ZHA_ENTITIES.config_diagnostic_match, Platform.UPDATE -) - async def async_setup_entry( hass: HomeAssistant, @@ -53,20 +45,20 @@ async def async_setup_entry( ) -> None: """Set up the Zigbee Home Automation update from config entry.""" zha_data = get_zha_data(hass) + if zha_data.update_coordinator is None: + zha_data.update_coordinator = ZHAFirmwareUpdateCoordinator( + hass, get_zha_gateway(hass).application_controller + ) entities_to_create = zha_data.platforms[Platform.UPDATE] - coordinator = ZHAFirmwareUpdateCoordinator( - hass, get_zha_gateway(hass).application_controller - ) - unsub = async_dispatcher_connect( hass, SIGNAL_ADD_ENTITIES, functools.partial( - discovery.async_add_entities, + zha_async_add_entities, async_add_entities, + ZHAFirmwareUpdateEntity, entities_to_create, - coordinator=coordinator, ), ) config_entry.async_on_unload(unsub) @@ -93,14 +85,11 @@ class ZHAFirmwareUpdateCoordinator(DataUpdateCoordinator[None]): # pylint: disa await self.controller_application.ota.broadcast_notify(jitter=100) -@CONFIG_DIAGNOSTIC_MATCH(cluster_handler_names=CLUSTER_HANDLER_OTA) class ZHAFirmwareUpdateEntity( - ZhaEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity + ZHAEntity, CoordinatorEntity[ZHAFirmwareUpdateCoordinator], UpdateEntity ): """Representation of a ZHA firmware update entity.""" - _unique_id_suffix = "firmware_update" - _attr_entity_category = 
EntityCategory.CONFIG _attr_device_class = UpdateDeviceClass.FIRMWARE _attr_supported_features = ( UpdateEntityFeature.INSTALL @@ -108,113 +97,70 @@ class ZHAFirmwareUpdateEntity( | UpdateEntityFeature.SPECIFIC_VERSION ) - def __init__( - self, - unique_id: str, - zha_device: ZHADevice, - channels: list[ClusterHandler], - coordinator: ZHAFirmwareUpdateCoordinator, - **kwargs: Any, - ) -> None: - """Initialize the ZHA update entity.""" - super().__init__(unique_id, zha_device, channels, **kwargs) - CoordinatorEntity.__init__(self, coordinator) + def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: + """Initialize the ZHA update entity.""" + zha_data = get_zha_data(entity_data.device_proxy.gateway_proxy.hass) + assert zha_data.update_coordinator is not None - self._ota_cluster_handler: ClusterHandler = self.cluster_handlers[ - CLUSTER_HANDLER_OTA - ] - self._attr_installed_version: str | None = self._get_cluster_version() - self._attr_latest_version = self._attr_installed_version - self._latest_firmware: OtaImageWithMetadata | None = None + super().__init__(entity_data, coordinator=zha_data.update_coordinator, **kwargs) + CoordinatorEntity.__init__(self, zha_data.update_coordinator) - def _get_cluster_version(self) -> str | None: - """Synchronize current file version with the cluster.""" + @property + def installed_version(self) -> str | None: + """Version installed and in use.""" + return self.entity_data.entity.installed_version - if self._ota_cluster_handler.current_file_version is not None: - return f"0x{self._ota_cluster_handler.current_file_version:08x}" + @property + def in_progress(self) -> bool | int | None: + """Update installation progress. - return None + Needs UpdateEntityFeature.PROGRESS flag to be set for it to be used. - @callback - def attribute_updated(self, attrid: int, name: str, value: Any) -> None: - """Handle attribute updates on the OTA cluster.""" - if attrid == Ota.AttributeDefs.current_file_version.id: - self._attr_installed_version = f"0x{value:08x}" - self.async_write_ha_state() + Can either return a boolean (True if in progress, False if not) + or an integer to indicate the progress from 0 to 100%. + """ + if not self.entity_data.entity.in_progress: + return self.entity_data.entity.in_progress - @callback - def device_ota_update_available( - self, image: OtaImageWithMetadata, current_file_version: int - ) -> None: - """Handle ota update available signal from Zigpy.""" - self._latest_firmware = image - self._attr_latest_version = f"0x{image.version:08x}" - self._attr_installed_version = f"0x{current_file_version:08x}" + # Stay in an indeterminate state until we actually send something + if self.entity_data.entity.progress == 0: + return True - if image.metadata.changelog: - self._attr_release_summary = image.metadata.changelog + # Rescale 0-100% to 2-100% to avoid 0 and 1 colliding with None, False, and True + return int(math.ceil(2 + 98 * self.entity_data.entity.progress / 100)) - self.async_write_ha_state() + @property + def latest_version(self) -> str | None: + """Latest version available for install.""" + return self.entity_data.entity.latest_version - @callback - def _update_progress(self, current: int, total: int, progress: float) -> None: - """Update install progress on event.""" - # If we are not supposed to be updating, do nothing - if self._attr_in_progress is False: - return + @property + def release_summary(self) -> str | None: + """Summary of the release notes or changelog. 
- # Remap progress to 2-100 to avoid 0 and 1 - self._attr_in_progress = int(math.ceil(2 + 98 * progress / 100)) - self.async_write_ha_state() + This is not suitable for long changelogs, but merely suitable + for a short excerpt update description of max 255 characters. + """ + return self.entity_data.entity.release_summary + @property + def release_url(self) -> str | None: + """URL to the full release notes of the latest version available.""" + return self.entity_data.entity.release_url + + # We explicitly convert ZHA exceptions to HA exceptions here so there is no need to + # use the `@convert_zha_error_to_ha_error` decorator. async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: """Install an update.""" - assert self._latest_firmware is not None - - # Set the progress to an indeterminate state - self._attr_in_progress = True - self.async_write_ha_state() - try: - result = await self.zha_device.device.update_firmware( - image=self._latest_firmware, - progress_callback=self._update_progress, - ) - except Exception as ex: - raise HomeAssistantError(f"Update was not successful: {ex}") from ex - - # If we tried to install firmware that is no longer compatible with the device, - # bail out - if result == Status.NO_IMAGE_AVAILABLE: - self._attr_latest_version = self._attr_installed_version + await self.entity_data.entity.async_install(version=version, backup=backup) + except ZHAException as exc: + raise HomeAssistantError(exc) from exc + finally: self.async_write_ha_state() - # If the update finished but was not successful, we should also throw an error - if result != Status.SUCCESS: - raise HomeAssistantError(f"Update was not successful: {result}") - - # Clear the state - self._latest_firmware = None - self._attr_in_progress = False - self.async_write_ha_state() - - async def async_added_to_hass(self) -> None: - """Call when entity is added.""" - await super().async_added_to_hass() - - # OTA events are sent by the device - self.zha_device.device.add_listener(self) - self.async_accept_signal( - self._ota_cluster_handler, SIGNAL_ATTR_UPDATED, self.attribute_updated - ) - - async def async_will_remove_from_hass(self) -> None: - """Call when entity will be removed.""" - await super().async_will_remove_from_hass() - self._attr_in_progress = False - async def async_update(self) -> None: """Update the entity.""" await CoordinatorEntity.async_update(self) diff --git a/homeassistant/components/zha/websocket_api.py b/homeassistant/components/zha/websocket_api.py index cb95e930b1a..0d4296e4b22 100644 --- a/homeassistant/components/zha/websocket_api.py +++ b/homeassistant/components/zha/websocket_api.py @@ -7,28 +7,7 @@ import logging from typing import TYPE_CHECKING, Any, Literal, NamedTuple, cast import voluptuous as vol -import zigpy.backups -from zigpy.config import CONF_DEVICE -from zigpy.config.validators import cv_boolean -from zigpy.types.named import EUI64, KeyData -from zigpy.zcl.clusters.security import IasAce -import zigpy.zdo.types as zdo_types - -from homeassistant.components import websocket_api -from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME -from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.service import async_register_admin_service -from homeassistant.helpers.typing import VolDictType, VolSchemaType - -from 
.api import ( - async_change_channel, - async_get_active_network_settings, - async_get_radio_type, -) -from .core.const import ( +from zha.application.const import ( ATTR_ARGS, ATTR_ATTRIBUTE, ATTR_CLUSTER_ID, @@ -47,13 +26,51 @@ from .core.const import ( ATTR_WARNING_DEVICE_STROBE, ATTR_WARNING_DEVICE_STROBE_DUTY_CYCLE, ATTR_WARNING_DEVICE_STROBE_INTENSITY, - BINDINGS, CLUSTER_COMMAND_SERVER, CLUSTER_COMMANDS_CLIENT, CLUSTER_COMMANDS_SERVER, - CLUSTER_HANDLER_IAS_WD, CLUSTER_TYPE_IN, CLUSTER_TYPE_OUT, + WARNING_DEVICE_MODE_EMERGENCY, + WARNING_DEVICE_SOUND_HIGH, + WARNING_DEVICE_SQUAWK_MODE_ARMED, + WARNING_DEVICE_STROBE_HIGH, + WARNING_DEVICE_STROBE_YES, + ZHA_CLUSTER_HANDLER_MSG, +) +from zha.application.gateway import Gateway +from zha.application.helpers import ( + async_is_bindable_target, + convert_install_code, + get_matched_clusters, + qr_to_install_code, +) +from zha.zigbee.cluster_handlers.const import CLUSTER_HANDLER_IAS_WD +from zha.zigbee.device import Device +from zha.zigbee.group import GroupMemberReference +import zigpy.backups +from zigpy.config import CONF_DEVICE +from zigpy.config.validators import cv_boolean +from zigpy.types.named import EUI64, KeyData +from zigpy.zcl.clusters.security import IasAce +import zigpy.zdo.types as zdo_types + +from homeassistant.components import websocket_api +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_COMMAND, ATTR_ID, ATTR_NAME +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.helpers import entity_registry as er +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.service import async_register_admin_service +from homeassistant.helpers.typing import VolDictType, VolSchemaType + +from .api import ( + async_change_channel, + async_get_active_network_settings, + async_get_radio_type, +) +from .const import ( CUSTOM_CONFIGURATION, DOMAIN, EZSP_OVERWRITE_EUI64, @@ -61,33 +78,24 @@ from .core.const import ( GROUP_IDS, GROUP_NAME, MFG_CLUSTER_ID_START, - WARNING_DEVICE_MODE_EMERGENCY, - WARNING_DEVICE_SOUND_HIGH, - WARNING_DEVICE_SQUAWK_MODE_ARMED, - WARNING_DEVICE_STROBE_HIGH, - WARNING_DEVICE_STROBE_YES, ZHA_ALARM_OPTIONS, - ZHA_CLUSTER_HANDLER_MSG, - ZHA_CONFIG_SCHEMAS, + ZHA_OPTIONS, ) -from .core.gateway import EntityReference -from .core.group import GroupMember -from .core.helpers import ( +from .helpers import ( + CONF_ZHA_ALARM_SCHEMA, + CONF_ZHA_OPTIONS_SCHEMA, + EntityReference, + ZHAGatewayProxy, async_cluster_exists, - async_is_bindable_target, cluster_command_schema_to_vol_schema, - convert_install_code, - get_matched_clusters, + get_config_entry, get_zha_gateway, - qr_to_install_code, + get_zha_gateway_proxy, ) if TYPE_CHECKING: from homeassistant.components.websocket_api.connection import ActiveConnection - from .core.device import ZHADevice - from .core.gateway import ZHAGateway - _LOGGER = logging.getLogger(__name__) TYPE = "type" @@ -105,6 +113,8 @@ ATTR_SOURCE_IEEE = "source_ieee" ATTR_TARGET_IEEE = "target_ieee" ATTR_QR_CODE = "qr_code" +BINDINGS = "bindings" + SERVICE_PERMIT = "permit" SERVICE_REMOVE = "remove" SERVICE_SET_ZIGBEE_CLUSTER_ATTRIBUTE = "set_zigbee_cluster_attribute" @@ -234,6 +244,12 @@ SERVICE_SCHEMAS: dict[str, VolSchemaType] = { } +ZHA_CONFIG_SCHEMAS = { + ZHA_OPTIONS: CONF_ZHA_OPTIONS_SCHEMA, + ZHA_ALARM_OPTIONS: CONF_ZHA_ALARM_SCHEMA, +} + + class ClusterBinding(NamedTuple): """Describes a cluster binding.""" @@ -243,9 
+259,9 @@ class ClusterBinding(NamedTuple): endpoint_id: int -def _cv_group_member(value: dict[str, Any]) -> GroupMember: +def _cv_group_member(value: dict[str, Any]) -> GroupMemberReference: """Transform a group member.""" - return GroupMember( + return GroupMemberReference( ieee=value[ATTR_IEEE], endpoint_id=value[ATTR_ENDPOINT_ID], ) @@ -306,7 +322,7 @@ async def websocket_permit_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Permit ZHA zigbee devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) duration: int = msg[ATTR_DURATION] ieee: EUI64 | None = msg.get(ATTR_IEEE) @@ -321,28 +337,30 @@ async def websocket_permit_devices( @callback def async_cleanup() -> None: """Remove signal listener and turn off debug mode.""" - zha_gateway.async_disable_debug_mode() + zha_gateway_proxy.async_disable_debug_mode() remove_dispatcher_function() connection.subscriptions[msg["id"]] = async_cleanup - zha_gateway.async_enable_debug_mode() + zha_gateway_proxy.async_enable_debug_mode() src_ieee: EUI64 link_key: KeyData if ATTR_SOURCE_IEEE in msg: src_ieee = msg[ATTR_SOURCE_IEEE] link_key = msg[ATTR_INSTALL_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway.application_controller.permit_with_link_key( + await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) elif ATTR_QR_CODE in msg: src_ieee, link_key = msg[ATTR_QR_CODE] _LOGGER.debug("Allowing join for %s device with link key", src_ieee) - await zha_gateway.application_controller.permit_with_link_key( + await zha_gateway_proxy.gateway.application_controller.permit_with_link_key( time_s=duration, node=src_ieee, link_key=link_key ) else: - await zha_gateway.application_controller.permit(time_s=duration, node=ieee) + await zha_gateway_proxy.gateway.application_controller.permit( + time_s=duration, node=ieee + ) connection.send_result(msg[ID]) @@ -353,26 +371,26 @@ async def websocket_get_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway = get_zha_gateway(hass) - devices = [device.zha_device_info for device in zha_gateway.devices.values()] + zha_gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + devices = [ + device.zha_device_info for device in zha_gateway_proxy.device_proxies.values() + ] connection.send_result(msg[ID], devices) @callback -def _get_entity_name( - zha_gateway: ZHAGateway, entity_ref: EntityReference -) -> str | None: +def _get_entity_name(zha_gateway: Gateway, entity_ref: EntityReference) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.reference_id) + entry = entity_registry.async_get(entity_ref.ha_entity_id) return entry.name if entry else None @callback def _get_entity_original_name( - zha_gateway: ZHAGateway, entity_ref: EntityReference + zha_gateway: Gateway, entity_ref: EntityReference ) -> str | None: entity_registry = er.async_get(zha_gateway.hass) - entry = entity_registry.async_get(entity_ref.reference_id) + entry = entity_registry.async_get(entity_ref.ha_entity_id) return entry.original_name if entry else None @@ -383,32 +401,36 @@ async def websocket_get_groupable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices that can be grouped.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = 
get_zha_gateway_proxy(hass) - devices = [device for device in zha_gateway.devices.values() if device.is_groupable] + devices = [ + device + for device in zha_gateway_proxy.device_proxies.values() + if device.device.is_groupable + ] groupable_devices: list[dict[str, Any]] = [] for device in devices: - entity_refs = zha_gateway.device_registry[device.ieee] + entity_refs = zha_gateway_proxy.ha_entity_refs[device.device.ieee] groupable_devices.extend( { "endpoint_id": ep_id, "entities": [ { - "name": _get_entity_name(zha_gateway, entity_ref), + "name": _get_entity_name(zha_gateway_proxy, entity_ref), "original_name": _get_entity_original_name( - zha_gateway, entity_ref + zha_gateway_proxy, entity_ref ), } for entity_ref in entity_refs - if list(entity_ref.cluster_handlers.values())[ + if list(entity_ref.entity_data.entity.cluster_handlers.values())[ 0 ].cluster.endpoint.endpoint_id == ep_id ], "device": device.zha_device_info, } - for ep_id in device.async_get_groupable_endpoints() + for ep_id in device.device.async_get_groupable_endpoints() ) connection.send_result(msg[ID], groupable_devices) @@ -421,8 +443,8 @@ async def websocket_get_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA groups.""" - zha_gateway = get_zha_gateway(hass) - groups = [group.group_info for group in zha_gateway.groups.values()] + zha_gateway_proxy = get_zha_gateway_proxy(hass) + groups = [group.group_info for group in zha_gateway_proxy.group_proxies.values()] connection.send_result(msg[ID], groups) @@ -438,10 +460,10 @@ async def websocket_get_device( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) ieee: EUI64 = msg[ATTR_IEEE] - if not (zha_device := zha_gateway.devices.get(ieee)): + if not (zha_device := zha_gateway_proxy.device_proxies.get(ieee)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Device not found" @@ -465,10 +487,10 @@ async def websocket_get_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA group.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - if not (zha_group := zha_gateway.groups.get(group_id)): + if not (zha_group := zha_gateway_proxy.group_proxies.get(group_id)): connection.send_message( websocket_api.error_message( msg[ID], websocket_api.ERR_NOT_FOUND, "ZHA Group not found" @@ -494,13 +516,17 @@ async def websocket_add_group( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Add a new ZHA group.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway = get_zha_gateway_proxy(hass) group_name: str = msg[GROUP_NAME] group_id: int | None = msg.get(GROUP_ID) - members: list[GroupMember] | None = msg.get(ATTR_MEMBERS) - group = await zha_gateway.async_create_zigpy_group(group_name, members, group_id) + members: list[GroupMemberReference] | None = msg.get(ATTR_MEMBERS) + group = await zha_gateway.gateway.async_create_zigpy_group( + group_name, members, group_id + ) assert group - connection.send_result(msg[ID], group.group_info) + connection.send_result( + msg[ID], zha_gateway.group_proxies[group.group_id].group_info + ) @websocket_api.require_admin @@ -515,17 +541,18 @@ async def websocket_remove_groups( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Remove the 
specified ZHA groups.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway = get_zha_gateway_proxy(hass) group_ids: list[int] = msg[GROUP_IDS] if len(group_ids) > 1: tasks = [ - zha_gateway.async_remove_zigpy_group(group_id) for group_id in group_ids + zha_gateway.gateway.async_remove_zigpy_group(group_id) + for group_id in group_ids ] await asyncio.gather(*tasks) else: - await zha_gateway.async_remove_zigpy_group(group_ids[0]) - ret_groups = [group.group_info for group in zha_gateway.groups.values()] + await zha_gateway.gateway.async_remove_zigpy_group(group_ids[0]) + ret_groups = [group.group_info for group in zha_gateway.group_proxies.values()] connection.send_result(msg[ID], ret_groups) @@ -543,8 +570,9 @@ async def websocket_add_group_members( ) -> None: """Add members to a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -555,8 +583,9 @@ async def websocket_add_group_members( return await zha_group.async_add_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin @@ -573,8 +602,9 @@ async def websocket_remove_group_members( ) -> None: """Remove members from a ZHA group.""" zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) group_id: int = msg[GROUP_ID] - members: list[GroupMember] = msg[ATTR_MEMBERS] + members: list[GroupMemberReference] = msg[ATTR_MEMBERS] if not (zha_group := zha_gateway.groups.get(group_id)): connection.send_message( @@ -585,8 +615,9 @@ async def websocket_remove_group_members( return await zha_group.async_remove_members(members) - ret_group = zha_group.group_info - connection.send_result(msg[ID], ret_group) + ret_group = zha_gateway_proxy.get_group_proxy(group_id) + assert ret_group + connection.send_result(msg[ID], ret_group.group_info) @websocket_api.require_admin @@ -603,7 +634,7 @@ async def websocket_reconfigure_node( """Reconfigure a ZHA nodes entities by its ieee address.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = msg[ATTR_IEEE] - device: ZHADevice | None = zha_gateway.get_device(ieee) + device: Device | None = zha_gateway.get_device(ieee) async def forward_messages(data): """Forward events to websocket.""" @@ -865,14 +896,15 @@ async def websocket_get_bindable_devices( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Directly bind devices.""" - zha_gateway = get_zha_gateway(hass) + zha_gateway_proxy = get_zha_gateway_proxy(hass) source_ieee: EUI64 = msg[ATTR_IEEE] - source_device = zha_gateway.get_device(source_ieee) + source_device = zha_gateway_proxy.device_proxies.get(source_ieee) + assert source_device is not None devices = [ device.zha_device_info - for device in zha_gateway.devices.values() - if async_is_bindable_target(source_device, device) + for device in zha_gateway_proxy.device_proxies.values() + if async_is_bindable_target(source_device.device, device.device) ] _LOGGER.debug( @@ -993,7 +1025,7 @@ async def websocket_unbind_group( async def async_binding_operation( - zha_gateway: ZHAGateway, + zha_gateway: Gateway, source_ieee: EUI64, target_ieee: EUI64, operation: zdo_types.ZDOCmd, @@ -1047,7 
+1079,7 @@ async def websocket_get_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Get ZHA configuration.""" - zha_gateway = get_zha_gateway(hass) + config_entry: ConfigEntry = get_config_entry(hass) import voluptuous_serialize # pylint: disable=import-outside-toplevel def custom_serializer(schema: Any) -> Any: @@ -1070,9 +1102,9 @@ async def websocket_get_configuration( data["schemas"][section] = voluptuous_serialize.convert( schema, custom_serializer=custom_serializer ) - data["data"][section] = zha_gateway.config_entry.options.get( - CUSTOM_CONFIGURATION, {} - ).get(section, {}) + data["data"][section] = config_entry.options.get(CUSTOM_CONFIGURATION, {}).get( + section, {} + ) # send default values for unconfigured options for entry in data["schemas"][section]: @@ -1094,8 +1126,8 @@ async def websocket_update_zha_configuration( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Update the ZHA configuration.""" - zha_gateway = get_zha_gateway(hass) - options = zha_gateway.config_entry.options + config_entry: ConfigEntry = get_config_entry(hass) + options = config_entry.options data_to_save = {**options, CUSTOM_CONFIGURATION: msg["data"]} for section, schema in ZHA_CONFIG_SCHEMAS.items(): @@ -1126,10 +1158,8 @@ async def websocket_update_zha_configuration( data_to_save, ) - hass.config_entries.async_update_entry( - zha_gateway.config_entry, options=data_to_save - ) - status = await hass.config_entries.async_reload(zha_gateway.config_entry.entry_id) + hass.config_entries.async_update_entry(config_entry, options=data_to_save) + status = await hass.config_entries.async_reload(config_entry.entry_id) connection.send_result(msg[ID], status) @@ -1142,10 +1172,11 @@ async def websocket_get_network_settings( """Get ZHA network settings.""" backup = async_get_active_network_settings(hass) zha_gateway = get_zha_gateway(hass) + config_entry: ConfigEntry = get_config_entry(hass) connection.send_result( msg[ID], { - "radio_type": async_get_radio_type(hass, zha_gateway.config_entry).name, + "radio_type": async_get_radio_type(hass, config_entry).name, "device": zha_gateway.application_controller.config[CONF_DEVICE], "settings": backup.as_dict(), }, @@ -1280,12 +1311,8 @@ def async_load_api(hass: HomeAssistant) -> None: """Remove a node from the network.""" zha_gateway = get_zha_gateway(hass) ieee: EUI64 = service.data[ATTR_IEEE] - zha_device: ZHADevice | None = zha_gateway.get_device(ieee) - if zha_device is not None and zha_device.is_active_coordinator: - _LOGGER.info("Removing the coordinator (%s) is not allowed", ieee) - return _LOGGER.info("Removing node %s", ieee) - await application_controller.remove(ieee) + await zha_gateway.async_remove_device(ieee) async_register_admin_service( hass, DOMAIN, SERVICE_REMOVE, remove, schema=SERVICE_SCHEMAS[IEEE_SERVICE] diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 6798e644a02..6e750ee8b2d 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import asdict, dataclass, field from enum import StrEnum from typing import TYPE_CHECKING, Any, cast from awesomeversion import AwesomeVersion -from typing_extensions import Generator from zwave_js_server.const import ( CURRENT_STATE_PROPERTY, CURRENT_VALUE_PROPERTY, diff --git 
a/homeassistant/components/zwave_js/fan.py b/homeassistant/components/zwave_js/fan.py index 925a48512d8..37d3fc57886 100644 --- a/homeassistant/components/zwave_js/fan.py +++ b/homeassistant/components/zwave_js/fan.py @@ -78,7 +78,12 @@ async def async_setup_entry( class ZwaveFan(ZWaveBaseEntity, FanEntity): """Representation of a Z-Wave fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False def __init__( self, config_entry: ConfigEntry, driver: Driver, info: ZwaveDiscoveryInfo @@ -249,7 +254,11 @@ class ValueMappingZwaveFan(ZwaveFan): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - flags = FanEntityFeature.SET_SPEED + flags = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) if self.has_fan_value_mapping and self.fan_value_mapping.presets: flags |= FanEntityFeature.PRESET_MODE @@ -382,7 +391,13 @@ class ZwaveThermostatFan(ZWaveBaseEntity, FanEntity): @property def supported_features(self) -> FanEntityFeature: """Flag supported features.""" - return FanEntityFeature.PRESET_MODE + if not self._fan_off: + return FanEntityFeature.PRESET_MODE + return ( + FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + ) @property def fan_state(self) -> str | None: diff --git a/homeassistant/components/zwave_js/lock.py b/homeassistant/components/zwave_js/lock.py index 5eb89e17402..b16c1090ef3 100644 --- a/homeassistant/components/zwave_js/lock.py +++ b/homeassistant/components/zwave_js/lock.py @@ -196,15 +196,19 @@ class ZWaveLock(ZWaveBaseEntity, LockEntity): ) -> None: """Set the lock configuration.""" params: dict[str, Any] = {"operation_type": operation_type} - for attr, val in ( - ("lock_timeout_configuration", lock_timeout), - ("auto_relock_time", auto_relock_time), - ("hold_and_release_time", hold_and_release_time), - ("twist_assist", twist_assist), - ("block_to_block", block_to_block), - ): - if val is not None: - params[attr] = val + params.update( + { + attr: val + for attr, val in ( + ("lock_timeout_configuration", lock_timeout), + ("auto_relock_time", auto_relock_time), + ("hold_and_release_time", hold_and_release_time), + ("twist_assist", twist_assist), + ("block_to_block", block_to_block), + ) + if val is not None + } + ) configuration = DoorLockCCConfigurationSetOptions(**params) result = await set_configuration( self.info.node.endpoints[self.info.primary_value.endpoint or 0], diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index 66d09714723..e5c0bd64781 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -3,12 +3,11 @@ from __future__ import annotations import asyncio -from collections.abc import Collection, Sequence +from collections.abc import Collection, Generator, Sequence import logging import math from typing import Any -from typing_extensions import Generator import voluptuous as vol from zwave_js_server.client import Client as ZwaveClient from zwave_js_server.const import SET_VALUE_SUCCESS, CommandClass, CommandStatus diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 7c65f1804b1..4bba3e0538c 100644 --- a/homeassistant/components/zwave_js/strings.json +++ 
b/homeassistant/components/zwave_js/strings.json @@ -291,7 +291,7 @@ "name": "Clear lock user code" }, "invoke_cc_api": { - "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` service and require direct calls to the Command Class API.", + "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { "command_class": { "description": "The ID of the command class that you want to issue a command to.", @@ -313,7 +313,7 @@ "name": "Invoke a Command Class API on a node (advanced)" }, "multicast_set_value": { - "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", @@ -475,7 +475,7 @@ "name": "Set lock user code" }, "set_value": { - "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This service has minimal validation so only use this service if you know what you are doing.", + "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { "command_class": { "description": "The ID of the command class for the value.", @@ -502,7 +502,7 @@ "name": "[%key:component::zwave_js::services::set_config_parameter::fields::value::name%]" }, "wait_for_result": { - "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the service call can take a while if setting a value on an asleep battery device.", + "description": "Whether or not to wait for a response from the node. If not included in the payload, the integration will decide whether to wait or not. If set to `true`, note that the action can take a while if setting a value on an asleep battery device.", "name": "Wait for result?" 
} }, diff --git a/homeassistant/components/zwave_js/triggers/value_updated.py b/homeassistant/components/zwave_js/triggers/value_updated.py index 4814eba0757..d8c5702ce5d 100644 --- a/homeassistant/components/zwave_js/triggers/value_updated.py +++ b/homeassistant/components/zwave_js/triggers/value_updated.py @@ -128,14 +128,9 @@ async def async_attach_trigger( (prev_value, prev_value_raw, from_value), (curr_value, curr_value_raw, to_value), ): - if ( - match != MATCH_ALL - and value_to_eval != match - and not ( - isinstance(match, list) - and (value_to_eval in match or raw_value_to_eval in match) - ) - and raw_value_to_eval != match + if match not in (MATCH_ALL, value_to_eval, raw_value_to_eval) and not ( + isinstance(match, list) + and (value_to_eval in match or raw_value_to_eval in match) ): return diff --git a/homeassistant/components/zwave_me/fan.py b/homeassistant/components/zwave_me/fan.py index 25ccec9a0fb..b8a4b5e4ad2 100644 --- a/homeassistant/components/zwave_me/fan.py +++ b/homeassistant/components/zwave_me/fan.py @@ -44,7 +44,12 @@ async def async_setup_entry( class ZWaveMeFan(ZWaveMeEntity, FanEntity): """Representation of a ZWaveMe Fan.""" - _attr_supported_features = FanEntityFeature.SET_SPEED + _attr_supported_features = ( + FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + _enable_turn_on_off_backwards_compatibility = False @property def percentage(self) -> int: diff --git a/homeassistant/config.py b/homeassistant/config.py index ff679d4df51..18c833d4c75 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -60,7 +60,7 @@ from .const import ( LEGACY_CONF_WHITELIST_EXTERNAL_DIRS, __version__, ) -from .core import DOMAIN as HA_DOMAIN, ConfigSource, HomeAssistant, callback +from .core import DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, callback from .exceptions import ConfigValidationError, HomeAssistantError from .generated.currencies import HISTORIC_CURRENCIES from .helpers import config_validation as cv, issue_registry as ir @@ -261,12 +261,12 @@ CUSTOMIZE_CONFIG_SCHEMA = vol.Schema( def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None: if currency not in HISTORIC_CURRENCIES: - ir.async_delete_issue(hass, HA_DOMAIN, "historic_currency") + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "historic_currency") return ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, "historic_currency", is_fixable=False, learn_more_url="homeassistant://config/general", @@ -278,12 +278,12 @@ def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> Non def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None: if country is not None: - ir.async_delete_issue(hass, HA_DOMAIN, "country_not_configured") + ir.async_delete_issue(hass, HOMEASSISTANT_DOMAIN, "country_not_configured") return ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, "country_not_configured", is_fixable=False, learn_more_url="homeassistant://config/general", @@ -481,12 +481,14 @@ async def async_hass_config_yaml(hass: HomeAssistant) -> dict: for invalid_domain in invalid_domains: config.pop(invalid_domain) - core_config = config.get(HA_DOMAIN, {}) + core_config = config.get(HOMEASSISTANT_DOMAIN, {}) try: await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {})) except vol.Invalid as exc: suffix = "" - if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, *exc.path]): + if annotation := find_annotation( + config, 
[HOMEASSISTANT_DOMAIN, CONF_PACKAGES, *exc.path] + ): suffix = f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error( "Invalid package configuration '%s'%s: %s", CONF_PACKAGES, suffix, exc @@ -709,7 +711,7 @@ def stringify_invalid( ) else: message_prefix = f"Invalid config for '{domain}'" - if domain != HA_DOMAIN and link: + if domain != HOMEASSISTANT_DOMAIN and link: message_suffix = f", please check the docs at {link}" else: message_suffix = "" @@ -792,7 +794,7 @@ def format_homeassistant_error( if annotation := find_annotation(config, [domain]): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" message = f"{message_prefix}: {str(exc) or repr(exc)}" - if domain != HA_DOMAIN and link: + if domain != HOMEASSISTANT_DOMAIN and link: message += f", please check the docs at {link}" return message @@ -815,7 +817,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non This method is a coroutine. """ - config = CORE_CONFIG_SCHEMA(config) + # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir + # so we need to run it in an executor job. + config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) # Only load auth during startup. if not hasattr(hass, "auth"): @@ -914,7 +918,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB]) for name, pkg in config[CONF_PACKAGES].items(): - if (pkg_cust := pkg.get(HA_DOMAIN)) is None: + if (pkg_cust := pkg.get(HOMEASSISTANT_DOMAIN)) is None: continue try: @@ -938,7 +942,9 @@ def _log_pkg_error( ) -> None: """Log an error while merging packages.""" message_prefix = f"Setup of package '{package}'" - if annotation := find_annotation(config, [HA_DOMAIN, CONF_PACKAGES, package]): + if annotation := find_annotation( + config, [HOMEASSISTANT_DOMAIN, CONF_PACKAGES, package] + ): message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}" _LOGGER.error("%s failed: %s", message_prefix, message) @@ -947,7 +953,7 @@ def _log_pkg_error( def _identify_config_schema(module: ComponentProtocol) -> str | None: """Extract the schema and identify list or dict based.""" if not isinstance(module.CONFIG_SCHEMA, vol.Schema): - return None + return None # type: ignore[unreachable] schema = module.CONFIG_SCHEMA.schema @@ -1053,7 +1059,7 @@ async def merge_packages_config( continue for comp_name, comp_conf in pack_conf.items(): - if comp_name == HA_DOMAIN: + if comp_name == HOMEASSISTANT_DOMAIN: continue try: domain = cv.domain_key(comp_name) @@ -1198,7 +1204,7 @@ def _get_log_message_and_stack_print_pref( # Generate the log message from the English translations log_message = async_get_exception_message( - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, platform_exception.translation_key, translation_placeholders=placeholders, ) @@ -1259,7 +1265,7 @@ def async_drop_config_annotations( # Don't drop annotations from the homeassistant integration because it may # have configuration for other integrations as packages. 
- if integration.domain in config and integration.domain != HA_DOMAIN: + if integration.domain in config and integration.domain != HOMEASSISTANT_DOMAIN: drop_config_annotations_rec(config[integration.domain]) return config @@ -1311,7 +1317,7 @@ def async_handle_component_errors( raise ConfigValidationError( translation_key, [platform_exception.exception for platform_exception in config_exception_info], - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, translation_placeholders=placeholders, ) @@ -1529,9 +1535,15 @@ async def async_process_component_config( return IntegrationConfigInfo(None, config_exceptions) # No custom config validator, proceed with schema validation - if hasattr(component, "CONFIG_SCHEMA"): + if config_schema := getattr(component, "CONFIG_SCHEMA", None): try: - return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) + if domain in config: + # cv.isdir, cv.isfile, cv.isdevice are not async + # friendly so we need to run this in executor + schema = await hass.async_add_executor_job(config_schema, config) + else: + schema = config_schema(config) + return IntegrationConfigInfo(schema, []) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index c8d671e1fe1..aa0113cd7ce 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -4,9 +4,18 @@ from __future__ import annotations import asyncio from collections import UserDict -from collections.abc import Callable, Coroutine, Hashable, Iterable, Mapping, ValuesView +from collections.abc import ( + Callable, + Coroutine, + Generator, + Hashable, + Iterable, + Mapping, + ValuesView, +) from contextvars import ContextVar from copy import deepcopy +from datetime import datetime from enum import Enum, StrEnum import functools from functools import cached_property @@ -16,14 +25,14 @@ from types import MappingProxyType from typing import TYPE_CHECKING, Any, Generic, Self, cast from async_interrupt import interrupt -from typing_extensions import Generator, TypeVar +from typing_extensions import TypeVar from . 
import data_entry_flow, loader from .components import persistent_notification from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform from .core import ( CALLBACK_TYPE, - DOMAIN as HA_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, Event, HassJob, @@ -61,6 +70,7 @@ from .setup import ( from .util import ulid as ulid_util from .util.async_ import create_eager_task from .util.decorator import Registry +from .util.dt import utc_from_timestamp, utcnow from .util.enum import try_parse_enum if TYPE_CHECKING: @@ -110,7 +120,7 @@ HANDLERS: Registry[str, type[ConfigFlow]] = Registry() STORAGE_KEY = "core.config_entries" STORAGE_VERSION = 1 -STORAGE_VERSION_MINOR = 2 +STORAGE_VERSION_MINOR = 3 SAVE_DELAY = 1 @@ -295,15 +305,19 @@ class ConfigEntry(Generic[_DataT]): _background_tasks: set[asyncio.Future[Any]] _integration_for_domain: loader.Integration | None _tries: int + created_at: datetime + modified_at: datetime def __init__( self, *, + created_at: datetime | None = None, data: Mapping[str, Any], disabled_by: ConfigEntryDisabler | None = None, domain: str, entry_id: str | None = None, minor_version: int, + modified_at: datetime | None = None, options: Mapping[str, Any] | None, pref_disable_new_entities: bool | None = None, pref_disable_polling: bool | None = None, @@ -407,6 +421,8 @@ class ConfigEntry(Generic[_DataT]): _setter(self, "_integration_for_domain", None) _setter(self, "_tries", 0) + _setter(self, "created_at", created_at or utcnow()) + _setter(self, "modified_at", modified_at or utcnow()) def __repr__(self) -> str: """Representation of ConfigEntry.""" @@ -475,8 +491,10 @@ class ConfigEntry(Generic[_DataT]): def as_json_fragment(self) -> json_fragment: """Return JSON fragment of a config entry.""" json_repr = { + "created_at": self.created_at.timestamp(), "entry_id": self.entry_id, "domain": self.domain, + "modified_at": self.modified_at.timestamp(), "title": self.title, "source": self.source, "state": self.state.value, @@ -823,6 +841,10 @@ class ConfigEntry(Generic[_DataT]): async def async_remove(self, hass: HomeAssistant) -> None: """Invoke remove callback on component.""" + old_modified_at = self.modified_at + object.__setattr__(self, "modified_at", utcnow()) + self.clear_cache() + if self.source == SOURCE_IGNORE: return @@ -854,6 +876,8 @@ class ConfigEntry(Generic[_DataT]): self.title, integration.domain, ) + # Restore modified_at + object.__setattr__(self, "modified_at", old_modified_at) @callback def _async_set_state( @@ -942,11 +966,13 @@ class ConfigEntry(Generic[_DataT]): def as_dict(self) -> dict[str, Any]: """Return dictionary version of this entry.""" return { + "created_at": self.created_at.isoformat(), "data": dict(self.data), "disabled_by": self.disabled_by, "domain": self.domain, "entry_id": self.entry_id, "minor_version": self.minor_version, + "modified_at": self.modified_at.isoformat(), "options": dict(self.options), "pref_disable_new_entities": self.pref_disable_new_entities, "pref_disable_polling": self.pref_disable_polling, @@ -1041,7 +1067,7 @@ class ConfigEntry(Generic[_DataT]): issue_id = f"config_entry_reauth_{self.domain}_{self.entry_id}" ir.async_create_issue( hass, - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, issue_id, data={"flow_id": result["flow_id"]}, is_fixable=False, @@ -1246,7 +1272,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): flow_id=flow_id, handler=handler, reason="single_instance_allowed", - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, ) loop = 
self.hass.loop @@ -1335,7 +1361,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): entry := self.config_entries.async_get_entry(entry_id) ) is not None: issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY: return result @@ -1352,7 +1378,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): flow_id=flow.flow_id, handler=flow.handler, reason="single_instance_allowed", - translation_domain=HA_DOMAIN, + translation_domain=HOMEASSISTANT_DOMAIN, ) # Check if config entry exists with unique ID. Unload it. @@ -1591,25 +1617,34 @@ class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict[str, Any]: """Migrate to the new version.""" data = old_data - if old_major_version == 1 and old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entry in data["entries"]: - # Populate keys which were introduced before version 1.2 + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for entry in data["entries"]: + # Populate keys which were introduced before version 1.2 - pref_disable_new_entities = entry.get("pref_disable_new_entities") - if pref_disable_new_entities is None and "system_options" in entry: - pref_disable_new_entities = entry.get("system_options", {}).get( - "disable_new_entities" + pref_disable_new_entities = entry.get("pref_disable_new_entities") + if pref_disable_new_entities is None and "system_options" in entry: + pref_disable_new_entities = entry.get("system_options", {}).get( + "disable_new_entities" + ) + + entry.setdefault("disabled_by", entry.get("disabled_by")) + entry.setdefault("minor_version", entry.get("minor_version", 1)) + entry.setdefault("options", entry.get("options", {})) + entry.setdefault( + "pref_disable_new_entities", pref_disable_new_entities ) + entry.setdefault( + "pref_disable_polling", entry.get("pref_disable_polling") + ) + entry.setdefault("unique_id", entry.get("unique_id")) - entry.setdefault("disabled_by", entry.get("disabled_by")) - entry.setdefault("minor_version", entry.get("minor_version", 1)) - entry.setdefault("options", entry.get("options", {})) - entry.setdefault("pref_disable_new_entities", pref_disable_new_entities) - entry.setdefault( - "pref_disable_polling", entry.get("pref_disable_polling") - ) - entry.setdefault("unique_id", entry.get("unique_id")) + if old_minor_version < 3: + # Version 1.3 adds the created_at and modified_at fields + created_at = utc_from_timestamp(0).isoformat() + for entry in data["entries"]: + entry["created_at"] = entry["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -1744,7 +1779,7 @@ class ConfigEntries: if "flow_id" in progress_flow: self.hass.config_entries.flow.async_abort(progress_flow["flow_id"]) issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}" - ir.async_delete_issue(self.hass, HA_DOMAIN, issue_id) + ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id) # After we have fully removed an "ignore" config entry we can try and rediscover # it so that a user is able to immediately start configuring it. 
We do this by @@ -1785,11 +1820,13 @@ class ConfigEntries: entry_id = entry["entry_id"] config_entry = ConfigEntry( + created_at=datetime.fromisoformat(entry["created_at"]), data=entry["data"], disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]), domain=entry["domain"], entry_id=entry_id, minor_version=entry["minor_version"], + modified_at=datetime.fromisoformat(entry["modified_at"]), options=entry["options"], pref_disable_new_entities=entry["pref_disable_new_entities"], pref_disable_polling=entry["pref_disable_polling"], @@ -2006,6 +2043,8 @@ class ConfigEntries: if not changed: return False + _setter(entry, "modified_at", utcnow()) + for listener in entry.update_listeners: self.hass.async_create_task( listener(self.hass, entry), diff --git a/homeassistant/const.py b/homeassistant/const.py index 71b7d79cb01..402f57a4f8b 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -23,8 +23,8 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "4" +MINOR_VERSION: Final = 8 +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) @@ -113,6 +113,7 @@ SUN_EVENT_SUNRISE: Final = "sunrise" # #### CONFIG #### CONF_ABOVE: Final = "above" CONF_ACCESS_TOKEN: Final = "access_token" +CONF_ACTION: Final = "action" CONF_ADDRESS: Final = "address" CONF_AFTER: Final = "after" CONF_ALIAS: Final = "alias" diff --git a/homeassistant/core.py b/homeassistant/core.py index c4392f62c52..5d223b9f19f 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -168,7 +168,7 @@ class EventStateEventData(TypedDict): class EventStateChangedData(EventStateEventData): """EVENT_STATE_CHANGED data. - A state changed event is fired when on state write when the state is changed. + A state changed event is fired when on state write the state is changed. """ old_state: State | None @@ -177,7 +177,7 @@ class EventStateChangedData(EventStateEventData): class EventStateReportedData(EventStateEventData): """EVENT_STATE_REPORTED data. - A state reported event is fired when on state write when the state is unchanged. + A state reported event is fired when on state write the state is unchanged. """ old_last_reported: datetime.datetime @@ -1308,6 +1308,11 @@ class EventOrigin(enum.Enum): """Return the event.""" return self.value + @cached_property + def idx(self) -> int: + """Return the index of the origin.""" + return next((idx for idx, origin in enumerate(EventOrigin) if origin is self)) + class Event(Generic[_DataT]): """Representation of an event within the bus.""" @@ -2238,16 +2243,45 @@ class StateMachine: This method must be run in the event loop. """ - new_state = str(new_state) - attributes = attributes or {} - old_state = self._states_data.get(entity_id) - if old_state is None: - # If the state is missing, try to convert the entity_id to lowercase - # and try again. 
- entity_id = entity_id.lower() - old_state = self._states_data.get(entity_id) + self.async_set_internal( + entity_id.lower(), + str(new_state), + attributes or {}, + force_update, + context, + state_info, + timestamp or time.time(), + ) - if old_state is None: + @callback + def async_set_internal( + self, + entity_id: str, + new_state: str, + attributes: Mapping[str, Any] | None, + force_update: bool, + context: Context | None, + state_info: StateInfo | None, + timestamp: float, + ) -> None: + """Set the state of an entity, add entity if it does not exist. + + This method is intended to only be used by core internally + and should not be considered a stable API. We will make + breaking changes to this function in the future and it + should not be used in integrations. + + This method must be run in the event loop. + """ + # Most cases the key will be in the dict + # so we optimize for the happy path as + # python 3.11+ has near zero overhead for + # try when it does not raise an exception. + old_state: State | None + try: + old_state = self._states_data[entity_id] + except KeyError: + old_state = None same_state = False same_attr = False last_changed = None @@ -2267,10 +2301,11 @@ class StateMachine: # timestamp implementation: # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6387 # https://github.com/python/cpython/blob/c90a862cdcf55dc1753c6466e5fa4a467a13ae24/Modules/_datetimemodule.c#L6323 - if timestamp is None: - timestamp = time.time() now = dt_util.utc_from_timestamp(timestamp) + if context is None: + context = Context(id=ulid_at_time(timestamp)) + if same_state and same_attr: # mypy does not understand this is only possible if old_state is not None old_last_reported = old_state.last_reported # type: ignore[union-attr] @@ -2289,9 +2324,6 @@ class StateMachine: ) return - if context is None: - context = Context(id=ulid_at_time(timestamp)) - if same_attr: if TYPE_CHECKING: assert old_state is not None diff --git a/homeassistant/data_entry_flow.py b/homeassistant/data_entry_flow.py index f632e3e4dde..b8e8f269b82 100644 --- a/homeassistant/data_entry_flow.py +++ b/homeassistant/data_entry_flow.py @@ -46,7 +46,7 @@ class FlowResultType(StrEnum): MENU = "menu" -# RESULT_TYPE_* is deprecated, to be removed in 2022.9 +# RESULT_TYPE_* is deprecated, to be removed in 2025.1 _DEPRECATED_RESULT_TYPE_FORM = DeprecatedConstantEnum(FlowResultType.FORM, "2025.1") _DEPRECATED_RESULT_TYPE_CREATE_ENTRY = DeprecatedConstantEnum( FlowResultType.CREATE_ENTRY, "2025.1" @@ -112,9 +112,7 @@ class UnknownStep(FlowError): """Unknown step specified.""" -# ignore misc is required as vol.Invalid is not typed -# mypy error: Class cannot subclass "Invalid" (has type "Any") -class InvalidData(vol.Invalid): # type: ignore[misc] +class InvalidData(vol.Invalid): """Invalid data provided.""" def __init__( @@ -386,7 +384,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): ) is not None and user_input is not None: data_schema = cast(vol.Schema, data_schema) try: - user_input = data_schema(user_input) # type: ignore[operator] + user_input = data_schema(user_input) except vol.Invalid as ex: raised_errors = [ex] if isinstance(ex, vol.MultipleInvalid): @@ -534,7 +532,7 @@ class FlowManager(abc.ABC, Generic[_FlowResultT, _HandlerT]): report( ( "does not use FlowResultType enum for data entry flow result type. 
" - "This is deprecated and will stop working in Home Assistant 2022.9" + "This is deprecated and will stop working in Home Assistant 2025.1" ), error_if_core=False, ) diff --git a/homeassistant/exceptions.py b/homeassistant/exceptions.py index 01e22d16e79..f308cbc5cd8 100644 --- a/homeassistant/exceptions.py +++ b/homeassistant/exceptions.py @@ -2,12 +2,10 @@ from __future__ import annotations -from collections.abc import Callable, Sequence +from collections.abc import Callable, Generator, Sequence from dataclasses import dataclass from typing import TYPE_CHECKING, Any -from typing_extensions import Generator - from .util.event_type import EventType if TYPE_CHECKING: diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index c576f242e30..dc30f9d76f0 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -14,6 +14,7 @@ APPLICATION_CREDENTIALS = [ "google_tasks", "home_connect", "husqvarna_automower", + "iotty", "lametric", "lyric", "microbees", @@ -24,6 +25,7 @@ APPLICATION_CREDENTIALS = [ "netatmo", "senz", "spotify", + "tesla_fleet", "twitch", "withings", "xbox", diff --git a/homeassistant/generated/bluetooth.py b/homeassistant/generated/bluetooth.py index 17461225851..2ea604a91a2 100644 --- a/homeassistant/generated/bluetooth.py +++ b/homeassistant/generated/bluetooth.py @@ -137,6 +137,41 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "govee_ble", "local_name": "B5178*", }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5121*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5122*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5123*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5124*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5125*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GV5126*", + }, + { + "connectable": False, + "domain": "govee_ble", + "local_name": "GVH5127*", + }, { "connectable": False, "domain": "govee_ble", @@ -221,6 +256,22 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 19506, "service_uuid": "00001801-0000-1000-8000-00805f9b34fb", }, + { + "connectable": False, + "domain": "govee_ble", + "manufacturer_id": 61320, + }, + { + "connectable": False, + "domain": "govee_ble", + "manufacturer_data_start": [ + 236, + 0, + 0, + 1, + ], + "manufacturer_id": 34819, + }, { "domain": "homekit_controller", "manufacturer_data_start": [ @@ -270,6 +321,11 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "domain": "inkbird", "local_name": "tps", }, + { + "connectable": True, + "domain": "iron_os", + "service_uuid": "9eae1000-9d0d-48c5-aa55-33e27f9bc533", + }, { "connectable": False, "domain": "kegtron", @@ -624,6 +680,15 @@ BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [ "manufacturer_id": 27, "service_uuid": "0000fff0-0000-1000-8000-00805f9b34fb", }, + { + "connectable": False, + "domain": "thermobeacon", + "manufacturer_data_start": [ + 0, + ], + "manufacturer_id": 48, + "service_uuid": "0000fff0-0000-1000-8000-00805f9b34fb", + }, { "connectable": False, "domain": "thermobeacon", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 0020dc91ccd..0c37cf9c412 100644 --- a/homeassistant/generated/config_flows.py 
+++ b/homeassistant/generated/config_flows.py @@ -9,6 +9,7 @@ FLOWS = { "generic_hygrostat", "generic_thermostat", "group", + "history_stats", "integration", "min_max", "random", @@ -68,6 +69,7 @@ FLOWS = { "aurora", "aurora_abb_powerone", "aussie_broadband", + "autarco", "awair", "axis", "azure_data_explorer", @@ -80,6 +82,7 @@ FLOWS = { "blink", "blue_current", "bluemaestro", + "bluesound", "bluetooth", "bmw_connected_drive", "bond", @@ -90,6 +93,7 @@ FLOWS = { "brother", "brottsplatskartan", "brunt", + "bryant_evolution", "bsblan", "bthome", "buienradar", @@ -98,6 +102,7 @@ FLOWS = { "cast", "ccm15", "cert_expiry", + "chacon_dio", "cloudflare", "co2signal", "coinbase", @@ -145,6 +150,7 @@ FLOWS = { "efergy", "electrasmart", "electric_kiwi", + "elevenlabs", "elgato", "elkm1", "elmax", @@ -199,6 +205,7 @@ FLOWS = { "gardena_bluetooth", "gdacs", "generic", + "geniushub", "geo_json_events", "geocaching", "geofency", @@ -266,10 +273,13 @@ FLOWS = { "intellifire", "ios", "iotawatt", + "iotty", "ipma", "ipp", "iqvia", + "iron_os", "islamic_prayer_times", + "israel_rail", "iss", "ista_ecotrend", "isy994", @@ -306,6 +316,7 @@ FLOWS = { "lidarr", "lifx", "linear_garage_door", + "linkplay", "litejet", "litterrobot", "livisi", @@ -321,7 +332,9 @@ FLOWS = { "lutron", "lutron_caseta", "lyric", + "madvr", "mailgun", + "mastodon", "matter", "mealie", "meater", @@ -473,6 +486,7 @@ FLOWS = { "rpi_power", "rtsp_to_webrtc", "ruckus_unleashed", + "russound_rio", "ruuvi_gateway", "ruuvitag_ble", "rympro", @@ -496,6 +510,7 @@ FLOWS = { "shelly", "shopping_list", "sia", + "simplefin", "simplepush", "simplisafe", "skybell", @@ -558,6 +573,7 @@ FLOWS = { "technove", "tedee", "tellduslive", + "tesla_fleet", "tesla_wall_connector", "teslemetry", "tessie", @@ -616,6 +632,7 @@ FLOWS = { "volumio", "volvooncall", "vulcan", + "wake_on_lan", "wallbox", "waqi", "watttime", @@ -635,6 +652,7 @@ FLOWS = { "wled", "wolflink", "workday", + "worldclock", "ws66i", "wyoming", "xbox", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index a3db08e57c2..13009fb58be 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -200,12 +200,6 @@ "amazon": { "name": "Amazon", "integrations": { - "alexa": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Amazon Alexa" - }, "amazon_polly": { "integration_type": "hub", "config_flow": false, @@ -396,7 +390,7 @@ "iot_class": "cloud_push" }, "aprilaire": { - "name": "Aprilaire", + "name": "AprilAire", "integration_type": "device", "config_flow": true, "iot_class": "local_push" @@ -490,12 +484,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "assist_pipeline": { - "name": "Assist pipeline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "asterisk": { "name": "Asterisk", "integrations": { @@ -581,6 +569,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "autarco": { + "name": "Autarco", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "avion": { "name": "Avi-on", "integration_type": "hub", @@ -629,12 +623,6 @@ "config_flow": true, "iot_class": "local_push" }, - "bayesian": { - "name": "Bayesian", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "bbox": { "name": "Bbox", "integration_type": "hub", @@ -725,7 +713,7 @@ "bluesound": { "name": "Bluesound", "integration_type": "hub", - "config_flow": false, + "config_flow": 
true, "iot_class": "local_polling" }, "bluetooth": { @@ -810,6 +798,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "bryant_evolution": { + "name": "Bryant Evolution", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "bsblan": { "name": "BSB-Lan", "integration_type": "device", @@ -878,6 +872,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "chacon_dio": { + "name": "Chacon DiO", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "channels": { "name": "Channels", "integration_type": "hub", @@ -1339,7 +1339,7 @@ "iot_class": "local_push" }, "dsmr": { - "name": "DSMR Slimme Meter", + "name": "DSMR Smart Meter", "integration_type": "hub", "config_flow": true, "iot_class": "local_push" @@ -1504,6 +1504,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "elevenlabs": { + "name": "ElevenLabs", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "elgato": { "name": "Elgato", "integrations": { @@ -1792,11 +1798,6 @@ "ffmpeg": { "name": "FFmpeg", "integrations": { - "ffmpeg": { - "integration_type": "hub", - "config_flow": false, - "name": "FFmpeg" - }, "ffmpeg_motion": { "integration_type": "hub", "config_flow": false, @@ -1834,12 +1835,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "filter": { - "name": "Filter", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "fints": { "name": "FinTS", "integration_type": "service", @@ -2124,7 +2119,7 @@ "geniushub": { "name": "Genius Hub", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "geo_json_events": { @@ -2236,12 +2231,6 @@ "google": { "name": "Google", "integrations": { - "google_assistant": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Google Assistant" - }, "google_assistant_sdk": { "integration_type": "service", "config_flow": true, @@ -2518,12 +2507,6 @@ "config_flow": true, "iot_class": "local_polling" }, - "history_stats": { - "name": "History Stats", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "hitron_coda": { "name": "Rogers Hitron CODA", "integration_type": "hub", @@ -2863,6 +2846,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "iotty": { + "name": "iotty", + "integration_type": "device", + "config_flow": true, + "iot_class": "cloud_polling" + }, "iperf3": { "name": "Iperf3", "integration_type": "hub", @@ -2893,6 +2882,12 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "iron_os": { + "name": "IronOS", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "islamic_prayer_times": { "integration_type": "hub", "config_flow": true, @@ -2903,6 +2898,12 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, + "israel_rail": { + "name": "Israel Railways", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "iss": { "name": "International Space Station (ISS)", "integration_type": "service", @@ -3262,6 +3263,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "linkplay": { + "name": "LinkPlay", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "linksys_smart": { "name": "Linksys Smart Wi-Fi", "integration_type": "hub", @@ -3358,12 +3365,6 @@ "iot_class": "local_push", "name": "Logitech Harmony Hub" }, - "ue_smart_radio": { - "integration_type": "hub", - 
"config_flow": false, - "iot_class": "cloud_polling", - "name": "Logitech UE Smart Radio" - }, "squeezebox": { "integration_type": "hub", "config_flow": true, @@ -3447,6 +3448,12 @@ "integration_type": "virtual", "supported_by": "motion_blinds" }, + "madvr": { + "name": "madVR Envy", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_push" + }, "mailgun": { "name": "Mailgun", "integration_type": "hub", @@ -3477,8 +3484,8 @@ }, "mastodon": { "name": "Mastodon", - "integration_type": "hub", - "config_flow": false, + "integration_type": "service", + "config_flow": true, "iot_class": "cloud_push" }, "matrix": { @@ -3559,6 +3566,11 @@ "config_flow": false, "iot_class": "cloud_polling" }, + "mercury_nz": { + "name": "Mercury NZ Limited", + "integration_type": "virtual", + "supported_by": "opower" + }, "message_bird": { "name": "MessageBird", "integration_type": "hub", @@ -3695,6 +3707,11 @@ "config_flow": true, "iot_class": "local_polling" }, + "mini_connected": { + "name": "MINI Connected", + "integration_type": "virtual", + "supported_by": "bmw_connected_drive" + }, "minio": { "name": "Minio", "integration_type": "hub", @@ -4565,6 +4582,11 @@ "config_flow": false, "iot_class": "local_push" }, + "pinecil": { + "name": "Pinecil", + "integration_type": "virtual", + "supported_by": "iron_os" + }, "ping": { "name": "Ping (ICMP)", "integration_type": "hub", @@ -5147,7 +5169,7 @@ "integrations": { "russound_rio": { "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push", "name": "Russound RIO" }, @@ -5421,6 +5443,12 @@ "config_flow": false, "iot_class": "cloud_push" }, + "simplefin": { + "name": "SimpleFin", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "simplepush": { "name": "Simplepush", "integration_type": "hub", @@ -5962,10 +5990,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "tag": { - "integration_type": "hub", - "config_flow": false - }, "tailscale": { "name": "Tailscale", "integration_type": "hub", @@ -6104,6 +6128,12 @@ "config_flow": true, "iot_class": "local_polling", "name": "Tesla Wall Connector" + }, + "tesla_fleet": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Tesla Fleet" } } }, @@ -6714,7 +6744,7 @@ "wake_on_lan": { "name": "Wake on LAN", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "wallbox": { @@ -6763,11 +6793,6 @@ } } }, - "webhook": { - "name": "Webhook", - "integration_type": "hub", - "config_flow": false - }, "webmin": { "name": "Webmin", "integration_type": "device", @@ -6847,7 +6872,7 @@ "worldclock": { "name": "Worldclock", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "worldtidesinfo": { @@ -7133,6 +7158,12 @@ } }, "helper": { + "bayesian": { + "name": "Bayesian", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_polling" + }, "counter": { "integration_type": "helper", "config_flow": false @@ -7142,6 +7173,12 @@ "config_flow": true, "iot_class": "calculated" }, + "filter": { + "name": "Filter", + "integration_type": "helper", + "config_flow": false, + "iot_class": "local_push" + }, "generic_hygrostat": { "integration_type": "helper", "config_flow": true, @@ -7157,6 +7194,12 @@ "config_flow": true, "iot_class": "calculated" }, + "history_stats": { + "name": "History Stats", + "integration_type": "helper", + "config_flow": true, + "iot_class": 
"local_polling" + }, "input_boolean": { "integration_type": "helper", "config_flow": false @@ -7290,7 +7333,6 @@ "shopping_list", "sun", "switch_as_x", - "tag", "threshold", "time_date", "tod", diff --git a/homeassistant/generated/languages.py b/homeassistant/generated/languages.py index feedd373fd9..78105c76f4c 100644 --- a/homeassistant/generated/languages.py +++ b/homeassistant/generated/languages.py @@ -44,6 +44,7 @@ LANGUAGES = { "lb", "lt", "lv", + "mk", "ml", "nb", "nl", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 8efe49b7892..7cd60da2d0e 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -589,6 +589,11 @@ ZEROCONF = { "name": "gateway*", }, ], + "_linkplay._tcp.local.": [ + { + "domain": "linkplay", + }, + ], "_lookin._tcp.local.": [ { "domain": "lookin", @@ -646,6 +651,11 @@ ZEROCONF = { "name": "yeelink-*", }, ], + "_musc._tcp.local.": [ + { + "domain": "bluesound", + }, + ], "_nanoleafapi._tcp.local.": [ { "domain": "nanoleaf", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 5c4ead4e611..6f52569c38c 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Awaitable, Callable from contextlib import suppress +import socket from ssl import SSLContext import sys from types import MappingProxyType @@ -13,6 +14,7 @@ from typing import TYPE_CHECKING, Any import aiohttp from aiohttp import web from aiohttp.hdrs import CONTENT_TYPE, USER_AGENT +from aiohttp.resolver import AsyncResolver from aiohttp.web_exceptions import HTTPBadGateway, HTTPGatewayTimeout from homeassistant import config_entries @@ -23,7 +25,6 @@ from homeassistant.util import ssl as ssl_util from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import json_loads -from .backports.aiohttp_resolver import AsyncResolver from .frame import warn_use from .json import json_dumps @@ -82,7 +83,9 @@ class HassClientResponse(aiohttp.ClientResponse): @callback @bind_hass def async_get_clientsession( - hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 + hass: HomeAssistant, + verify_ssl: bool = True, + family: socket.AddressFamily = socket.AF_UNSPEC, ) -> aiohttp.ClientSession: """Return default aiohttp ClientSession. @@ -111,7 +114,7 @@ def async_create_clientsession( hass: HomeAssistant, verify_ssl: bool = True, auto_cleanup: bool = True, - family: int = 0, + family: socket.AddressFamily = socket.AF_UNSPEC, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. for cookies. @@ -142,7 +145,7 @@ def _async_create_clientsession( verify_ssl: bool = True, auto_cleanup_method: Callable[[HomeAssistant, aiohttp.ClientSession], None] | None = None, - family: int = 0, + family: socket.AddressFamily = socket.AF_UNSPEC, **kwargs: Any, ) -> aiohttp.ClientSession: """Create a new ClientSession with kwargs, i.e. 
for cookies.""" @@ -275,14 +278,18 @@ def _async_register_default_clientsession_shutdown( @callback -def _make_key(verify_ssl: bool = True, family: int = 0) -> tuple[bool, int]: +def _make_key( + verify_ssl: bool = True, family: socket.AddressFamily = socket.AF_UNSPEC +) -> tuple[bool, socket.AddressFamily]: """Make a key for connector or session pool.""" return (verify_ssl, family) @callback def _async_get_connector( - hass: HomeAssistant, verify_ssl: bool = True, family: int = 0 + hass: HomeAssistant, + verify_ssl: bool = True, + family: socket.AddressFamily = socket.AF_UNSPEC, ) -> aiohttp.BaseConnector: """Return the connector pool for aiohttp. diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 975750ebbdd..3e101f185ed 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -5,11 +5,13 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Iterable import dataclasses +from datetime import datetime from functools import cached_property from typing import Any, Literal, TypedDict from homeassistant.core import HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -31,7 +33,7 @@ EVENT_AREA_REGISTRY_UPDATED: EventType[EventAreaRegistryUpdatedData] = EventType ) STORAGE_KEY = "core.area_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 6 +STORAGE_VERSION_MINOR = 7 class _AreaStoreData(TypedDict): @@ -44,6 +46,8 @@ class _AreaStoreData(TypedDict): labels: list[str] name: str picture: str | None + created_at: str + modified_at: str class AreasRegistryStoreData(TypedDict): @@ -83,6 +87,8 @@ class AreaEntry(NormalizedNameBaseRegistryEntry): "labels": list(self.labels), "name": self.name, "picture": self.picture, + "created_at": self.created_at.timestamp(), + "modified_at": self.modified_at.timestamp(), } ) ) @@ -125,6 +131,12 @@ class AreaRegistryStore(Store[AreasRegistryStoreData]): for area in old_data["areas"]: area["labels"] = [] + if old_minor_version < 7: + # Version 1.7 adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for area in old_data["areas"]: + area["created_at"] = area["modified_at"] = created_at + if old_major_version > 1: raise NotImplementedError return old_data # type: ignore[return-value] @@ -315,17 +327,17 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): """Update name of area.""" old = self.areas[area_id] - new_values = {} - - for attr_name, value in ( - ("aliases", aliases), - ("icon", icon), - ("labels", labels), - ("picture", picture), - ("floor_id", floor_id), - ): - if value is not UNDEFINED and value != getattr(old, attr_name): - new_values[attr_name] = value + new_values: dict[str, Any] = { + attr_name: value + for attr_name, value in ( + ("aliases", aliases), + ("icon", icon), + ("labels", labels), + ("picture", picture), + ("floor_id", floor_id), + ) + if value is not UNDEFINED and value != getattr(old, attr_name) + } if name is not UNDEFINED and name != old.name: new_values["name"] = name @@ -334,8 +346,10 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): if not new_values: return old + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("area_registry.async_update") - new = self.areas[area_id] = dataclasses.replace(old, **new_values) # type: ignore[arg-type] + new = 
self.areas[area_id] = dataclasses.replace(old, **new_values) self.async_schedule_save() return new @@ -361,6 +375,8 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): name=area["name"], normalized_name=normalized_name, picture=area["picture"], + created_at=datetime.fromisoformat(area["created_at"]), + modified_at=datetime.fromisoformat(area["modified_at"]), ) self.areas = areas @@ -379,6 +395,8 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]): "labels": list(entry.labels), "name": entry.name, "picture": entry.picture, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.areas.values() ] diff --git a/homeassistant/helpers/backports/__init__.py b/homeassistant/helpers/backports/__init__.py deleted file mode 100644 index e672fe1d3d2..00000000000 --- a/homeassistant/helpers/backports/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Backports for helpers.""" diff --git a/homeassistant/helpers/backports/aiohttp_resolver.py b/homeassistant/helpers/backports/aiohttp_resolver.py deleted file mode 100644 index efa4ba4bb85..00000000000 --- a/homeassistant/helpers/backports/aiohttp_resolver.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Backport of aiohttp's AsyncResolver for Home Assistant. - -This is a backport of the AsyncResolver class from aiohttp 3.10. - -Before aiohttp 3.10, on system with IPv6 support, AsyncResolver would not fallback -to providing A records when AAAA records were not available. - -Additionally, unlike the ThreadedResolver, AsyncResolver -did not handle link-local addresses correctly. -""" - -from __future__ import annotations - -import asyncio -import socket -import sys -from typing import Any, TypedDict - -import aiodns -from aiohttp.abc import AbstractResolver - -# This is a backport of https://github.com/aio-libs/aiohttp/pull/8270 -# This can be removed once aiohttp 3.10 is the minimum supported version. - -_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV -_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0) - - -class ResolveResult(TypedDict): - """Resolve result. - - This is the result returned from an AbstractResolver's - resolve method. - - :param hostname: The hostname that was provided. - :param host: The IP address that was resolved. - :param port: The port that was resolved. - :param family: The address family that was resolved. - :param proto: The protocol that was resolved. - :param flags: The flags that were resolved. 
- """ - - hostname: str - host: str - port: int - family: int - proto: int - flags: int - - -class AsyncResolver(AbstractResolver): - """Use the `aiodns` package to make asynchronous DNS lookups.""" - - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Initialize the resolver.""" - if aiodns is None: - raise RuntimeError("Resolver requires aiodns library") - - self._loop = asyncio.get_running_loop() - self._resolver = aiodns.DNSResolver(*args, loop=self._loop, **kwargs) # type: ignore[misc] - - async def resolve( # type: ignore[override] - self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> list[ResolveResult]: - """Resolve a host name to an IP address.""" - try: - resp = await self._resolver.getaddrinfo( - host, - port=port, - type=socket.SOCK_STREAM, - family=family, # type: ignore[arg-type] - flags=socket.AI_ADDRCONFIG, - ) - except aiodns.error.DNSError as exc: - msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" - raise OSError(msg) from exc - hosts: list[ResolveResult] = [] - for node in resp.nodes: - address: tuple[bytes, int] | tuple[bytes, int, int, int] = node.addr - family = node.family - if family == socket.AF_INET6: - if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID: - # This is essential for link-local IPv6 addresses. - # LL IPv6 is a VERY rare case. Strictly speaking, we should use - # getnameinfo() unconditionally, but performance makes sense. - result = await self._resolver.getnameinfo( - (address[0].decode("ascii"), *address[1:]), - _NUMERIC_SOCKET_FLAGS, - ) - resolved_host = result.node - else: - resolved_host = address[0].decode("ascii") - port = address[1] - else: # IPv4 - assert family == socket.AF_INET - resolved_host = address[0].decode("ascii") - port = address[1] - hosts.append( - ResolveResult( - hostname=host, - host=resolved_host, - port=port, - family=family, - proto=0, - flags=_NUMERIC_SOCKET_FLAGS, - ) - ) - - if not hosts: - raise OSError("DNS lookup failed") - - return hosts - - async def close(self) -> None: - """Close the resolver.""" - self._resolver.cancel() diff --git a/homeassistant/helpers/category_registry.py b/homeassistant/helpers/category_registry.py index 6498859e2ab..41fa82084b3 100644 --- a/homeassistant/helpers/category_registry.py +++ b/homeassistant/helpers/category_registry.py @@ -5,9 +5,11 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass, field -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.ulid import ulid_now @@ -23,13 +25,16 @@ EVENT_CATEGORY_REGISTRY_UPDATED: EventType[EventCategoryRegistryUpdatedData] = ( ) STORAGE_KEY = "core.category_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _CategoryStoreData(TypedDict): """Data type for individual category. 
Used in CategoryRegistryStoreData.""" category_id: str + created_at: str icon: str | None + modified_at: str name: str @@ -55,10 +60,36 @@ class CategoryEntry: """Category registry entry.""" category_id: str = field(default_factory=ulid_now) + created_at: datetime = field(default_factory=utcnow) icon: str | None = None + modified_at: datetime = field(default_factory=utcnow) name: str +class CategoryRegistryStore(Store[CategoryRegistryStoreData]): + """Store category registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, dict[str, list[dict[str, Any]]]], + ) -> CategoryRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for categories in old_data["categories"].values(): + for category in categories: + category["created_at"] = category["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Class to hold a registry of categories by scope.""" @@ -66,11 +97,12 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): """Initialize the category registry.""" self.hass = hass self.categories: dict[str, dict[str, CategoryEntry]] = {} - self._store = Store( + self._store = CategoryRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -145,7 +177,7 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): ) -> CategoryEntry: """Update name or icon of the category.""" old = self.categories[scope][category_id] - changes = {} + changes: dict[str, Any] = {} if icon is not UNDEFINED and icon != old.icon: changes["icon"] = icon @@ -157,8 +189,10 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("category_registry.async_update") - new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.categories[scope][category_id] = dataclasses.replace(old, **changes) self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -180,7 +214,9 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): category_entries[scope] = { category["category_id"]: CategoryEntry( category_id=category["category_id"], + created_at=datetime.fromisoformat(category["created_at"]), icon=category["icon"], + modified_at=datetime.fromisoformat(category["modified_at"]), name=category["name"], ) for category in categories @@ -196,7 +232,9 @@ class CategoryRegistry(BaseRegistry[CategoryRegistryStoreData]): scope: [ { "category_id": entry.category_id, + "created_at": entry.created_at.isoformat(), "icon": entry.icon, + "modified_at": entry.modified_at.isoformat(), "name": entry.name, } for entry in entries.values() diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 0626e0033c4..06d836e8c20 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -22,7 +22,7 @@ from homeassistant.config import ( # type: ignore[attr-defined] load_yaml_config_file, merge_packages_config, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, 
HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.requirements import ( RequirementsNotFound, @@ -157,10 +157,10 @@ async def async_check_ha_config_file( # noqa: C901 return result.add_error(f"Error loading {config_path}: {err}") # Extract and validate core [homeassistant] config - core_config = config.pop(HA_DOMAIN, {}) + core_config = config.pop(HOMEASSISTANT_DOMAIN, {}) try: core_config = CORE_CONFIG_SCHEMA(core_config) - result[HA_DOMAIN] = core_config + result[HOMEASSISTANT_DOMAIN] = core_config # Merge packages await merge_packages_config( @@ -168,8 +168,8 @@ async def async_check_ha_config_file( # noqa: C901 ) except vol.Invalid as err: result.add_error( - format_schema_error(hass, err, HA_DOMAIN, core_config), - HA_DOMAIN, + format_schema_error(hass, err, HOMEASSISTANT_DOMAIN, core_config), + HOMEASSISTANT_DOMAIN, core_config, ) core_config = {} diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 036aaacf0e9..9151a9dfc6b 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -642,8 +642,8 @@ class StorageCollectionWebsocket[_StorageCollectionT: StorageCollection]: } for change in change_set ] - for connection, msg_id in self._subscribers: - connection.send_message(websocket_api.event_message(msg_id, json_msg)) + for conn, msg_id in self._subscribers: + conn.send_message(websocket_api.event_message(msg_id, json_msg)) if not self._subscribers: self._remove_subscription = ( diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index e15b40a78df..3438336dbfa 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import deque -from collections.abc import Callable, Container +from collections.abc import Callable, Container, Generator from contextlib import contextmanager from datetime import datetime, time as dt_time, timedelta import functools as ft @@ -12,7 +12,6 @@ import re import sys from typing import Any, Protocol, cast -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import zone as zone_cmp diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 58c76a40c8e..cd6670dc597 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -34,6 +34,7 @@ from homeassistant.const import ( ATTR_FLOOR_ID, ATTR_LABEL_ID, CONF_ABOVE, + CONF_ACTION, CONF_ALIAS, CONF_ATTRIBUTE, CONF_BELOW, @@ -1204,7 +1205,7 @@ PLATFORM_SCHEMA = vol.Schema( PLATFORM_SCHEMA_BASE = PLATFORM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) -ENTITY_SERVICE_FIELDS = { +ENTITY_SERVICE_FIELDS: VolDictType = { # Either accept static entity IDs, a single dynamic template or a mixed list # of static and dynamic templates. While this could be solved with a single # complex template, handling it like this, keeps config validation useful. 
@@ -1310,7 +1311,7 @@ def script_action(value: Any) -> dict: SCRIPT_SCHEMA = vol.All(ensure_list, [script_action]) -SCRIPT_ACTION_BASE_SCHEMA = { +SCRIPT_ACTION_BASE_SCHEMA: VolDictType = { vol.Optional(CONF_ALIAS): string, vol.Optional(CONF_CONTINUE_ON_ERROR): boolean, vol.Optional(CONF_ENABLED): vol.Any(boolean, template), @@ -1325,11 +1326,30 @@ EVENT_SCHEMA = vol.Schema( } ) + +def _backward_compat_service_schema(value: Any | None) -> Any: + """Backward compatibility for service schemas.""" + + if not isinstance(value, dict): + return value + + # `service` has been renamed to `action` + if CONF_SERVICE in value: + if CONF_ACTION in value: + raise vol.Invalid( + "Cannot specify both 'service' and 'action'. Please use 'action' only." + ) + value[CONF_ACTION] = value.pop(CONF_SERVICE) + + return value + + SERVICE_SCHEMA = vol.All( + _backward_compat_service_schema, vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Exclusive(CONF_SERVICE, "service name"): vol.Any( + vol.Exclusive(CONF_ACTION, "service name"): vol.Any( service, dynamic_template ), vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any( @@ -1348,7 +1368,7 @@ SERVICE_SCHEMA = vol.All( vol.Remove("metadata"): dict, } ), - has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE), + has_at_least_one_key(CONF_ACTION, CONF_SERVICE_TEMPLATE), ) NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any( @@ -1844,6 +1864,7 @@ ACTIONS_MAP = { CONF_WAIT_FOR_TRIGGER: SCRIPT_ACTION_WAIT_FOR_TRIGGER, CONF_VARIABLES: SCRIPT_ACTION_VARIABLES, CONF_IF: SCRIPT_ACTION_IF, + CONF_ACTION: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE_TEMPLATE: SCRIPT_ACTION_CALL_SERVICE, CONF_STOP: SCRIPT_ACTION_STOP, diff --git a/homeassistant/helpers/data_entry_flow.py b/homeassistant/helpers/data_entry_flow.py index 2adab32195b..b2cad292e3d 100644 --- a/homeassistant/helpers/data_entry_flow.py +++ b/homeassistant/helpers/data_entry_flow.py @@ -47,7 +47,7 @@ class _BaseFlowManagerView(HomeAssistantView, Generic[_FlowManagerT]): data = result.copy() if (schema := data["data_schema"]) is None: - data["data_schema"] = [] + data["data_schema"] = [] # type: ignore[typeddict-item] # json result type else: data["data_schema"] = voluptuous_serialize.convert( schema, custom_serializer=cv.custom_serializer diff --git a/homeassistant/helpers/device.py b/homeassistant/helpers/device.py index e1b9ded5723..16212422236 100644 --- a/homeassistant/helpers/device.py +++ b/homeassistant/helpers/device.py @@ -26,7 +26,10 @@ def async_device_info_to_link_from_entity( hass: HomeAssistant, entity_id_or_uuid: str, ) -> dr.DeviceInfo | None: - """DeviceInfo with information to link a device to a configuration entry in the link category from a entity id or entity uuid.""" + """DeviceInfo with information to link a device from an entity. + + DeviceInfo will only return information to categorize as a link. + """ return async_device_info_to_link_from_device_id( hass, @@ -39,7 +42,10 @@ def async_device_info_to_link_from_device_id( hass: HomeAssistant, device_id: str | None, ) -> dr.DeviceInfo | None: - """DeviceInfo with information to link a device to a configuration entry in the link category from a device id.""" + """DeviceInfo with information to link a device from a device id. + + DeviceInfo will only return information to categorize as a link. 
+ """ dev_reg = dr.async_get(hass) @@ -58,7 +64,11 @@ def async_remove_stale_devices_links_keep_entity_device( entry_id: str, source_entity_id_or_uuid: str, ) -> None: - """Remove the link between stales devices and a configuration entry, keeping only the device that the informed entity is linked to.""" + """Remove the link between stale devices and a configuration entry. + + Only the device passed in the source_entity_id_or_uuid parameter + linked to the configuration entry will be maintained. + """ async_remove_stale_devices_links_keep_current_device( hass=hass, @@ -73,9 +83,10 @@ def async_remove_stale_devices_links_keep_current_device( entry_id: str, current_device_id: str | None, ) -> None: - """Remove the link between stales devices and a configuration entry, keeping only the device informed. + """Remove the link between stale devices and a configuration entry. - Device passed in the current_device_id parameter will be kept linked to the configuration entry. + Only the device passed in the current_device_id parameter linked to + the configuration entry will be maintained. """ dev_reg = dr.async_get(hass) diff --git a/homeassistant/helpers/device_registry.py b/homeassistant/helpers/device_registry.py index 4579739f0e1..30001a64474 100644 --- a/homeassistant/helpers/device_registry.py +++ b/homeassistant/helpers/device_registry.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections import defaultdict from collections.abc import Mapping +from datetime import datetime from enum import StrEnum from functools import cached_property, lru_cache, partial import logging @@ -23,6 +24,7 @@ from homeassistant.core import ( ) from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_suggest_report_issue +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -55,7 +57,7 @@ EVENT_DEVICE_REGISTRY_UPDATED: EventType[EventDeviceRegistryUpdatedData] = Event ) STORAGE_KEY = "core.device_registry" STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 6 +STORAGE_VERSION_MINOR = 8 CLEANUP_DELAY = 10 @@ -94,6 +96,7 @@ class DeviceInfo(TypedDict, total=False): configuration_url: str | URL | None connections: set[tuple[str, str]] + created_at: str default_manufacturer: str default_model: str default_name: str @@ -101,6 +104,8 @@ class DeviceInfo(TypedDict, total=False): identifiers: set[tuple[str, str]] manufacturer: str | None model: str | None + model_id: str | None + modified_at: str name: str | None serial_number: str | None suggested_area: str | None @@ -127,6 +132,7 @@ DEVICE_INFO_TYPES = { "identifiers", "manufacturer", "model", + "model_id", "name", "serial_number", "suggested_area", @@ -279,6 +285,7 @@ class DeviceEntry: config_entries: set[str] = attr.ib(converter=set, factory=set) configuration_url: str | None = attr.ib(default=None) connections: set[tuple[str, str]] = attr.ib(converter=set, factory=set) + created_at: datetime = attr.ib(factory=utcnow) disabled_by: DeviceEntryDisabler | None = attr.ib(default=None) entry_type: DeviceEntryType | None = attr.ib(default=None) hw_version: str | None = attr.ib(default=None) @@ -287,6 +294,8 @@ class DeviceEntry: labels: set[str] = attr.ib(converter=set, factory=set) manufacturer: str | None = attr.ib(default=None) model: str | None = attr.ib(default=None) + model_id: str | None = attr.ib(default=None) + modified_at: datetime = 
attr.ib(factory=utcnow) name_by_user: str | None = attr.ib(default=None) name: str | None = attr.ib(default=None) primary_config_entry: str | None = attr.ib(default=None) @@ -313,6 +322,7 @@ class DeviceEntry: "configuration_url": self.configuration_url, "config_entries": list(self.config_entries), "connections": list(self.connections), + "created_at": self.created_at.timestamp(), "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -321,6 +331,8 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, + "model_id": self.model_id, + "modified_at": self.modified_at.timestamp(), "name_by_user": self.name_by_user, "name": self.name, "primary_config_entry": self.primary_config_entry, @@ -355,6 +367,7 @@ class DeviceEntry: "config_entries": list(self.config_entries), "configuration_url": self.configuration_url, "connections": list(self.connections), + "created_at": self.created_at.isoformat(), "disabled_by": self.disabled_by, "entry_type": self.entry_type, "hw_version": self.hw_version, @@ -363,6 +376,8 @@ class DeviceEntry: "labels": list(self.labels), "manufacturer": self.manufacturer, "model": self.model, + "model_id": self.model_id, + "modified_at": self.modified_at.isoformat(), "name_by_user": self.name_by_user, "name": self.name, "primary_config_entry": self.primary_config_entry, @@ -383,6 +398,8 @@ class DeletedDeviceEntry: identifiers: set[tuple[str, str]] = attr.ib() id: str = attr.ib() orphaned_timestamp: float | None = attr.ib() + created_at: datetime = attr.ib(factory=utcnow) + modified_at: datetime = attr.ib(factory=utcnow) def to_device_entry( self, @@ -395,6 +412,7 @@ class DeletedDeviceEntry: # type ignores: likely https://github.com/python/mypy/issues/8625 config_entries={config_entry_id}, # type: ignore[arg-type] connections=self.connections & connections, # type: ignore[arg-type] + created_at=self.created_at, identifiers=self.identifiers & identifiers, # type: ignore[arg-type] id=self.id, is_new=True, @@ -408,9 +426,11 @@ class DeletedDeviceEntry: { "config_entries": list(self.config_entries), "connections": list(self.connections), + "created_at": self.created_at.isoformat(), "identifiers": list(self.identifiers), "id": self.id, "orphaned_timestamp": self.orphaned_timestamp, + "modified_at": self.modified_at.isoformat(), } ) ) @@ -478,11 +498,22 @@ class DeviceRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): if old_minor_version < 5: # Introduced in 2024.3 for device in old_data["devices"]: - device["labels"] = device.get("labels", []) + device["labels"] = [] if old_minor_version < 6: # Introduced in 2024.7 for device in old_data["devices"]: - device.setdefault("primary_config_entry", None) + device["primary_config_entry"] = None + if old_minor_version < 7: + # Introduced in 2024.8 + for device in old_data["devices"]: + device["model_id"] = None + if old_minor_version < 8: + # Introduced in 2024.8 + created_at = utc_from_timestamp(0).isoformat() + for device in old_data["devices"]: + device["created_at"] = device["modified_at"] = created_at + for device in old_data["deleted_devices"]: + device["created_at"] = device["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -679,6 +710,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): config_entry_id: str, configuration_url: str | URL | None | UndefinedType = UNDEFINED, connections: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, + created_at: str | datetime | 
UndefinedType = UNDEFINED, # will be ignored default_manufacturer: str | None | UndefinedType = UNDEFINED, default_model: str | None | UndefinedType = UNDEFINED, default_name: str | None | UndefinedType = UNDEFINED, @@ -689,6 +721,8 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): identifiers: set[tuple[str, str]] | None | UndefinedType = UNDEFINED, manufacturer: str | None | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, + model_id: str | None | UndefinedType = UNDEFINED, + modified_at: str | datetime | UndefinedType = UNDEFINED, # will be ignored name: str | None | UndefinedType = UNDEFINED, serial_number: str | None | UndefinedType = UNDEFINED, suggested_area: str | None | UndefinedType = UNDEFINED, @@ -735,6 +769,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("identifiers", identifiers), ("manufacturer", manufacturer), ("model", model), + ("model_id", model_id), ("name", name), ("serial_number", serial_number), ("suggested_area", suggested_area), @@ -810,6 +845,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections=connections or UNDEFINED, merge_identifiers=identifiers or UNDEFINED, model=model, + model_id=model_id, name=name, serial_number=serial_number, suggested_area=suggested_area, @@ -843,6 +879,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): merge_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, merge_identifiers: set[tuple[str, str]] | UndefinedType = UNDEFINED, model: str | None | UndefinedType = UNDEFINED, + model_id: str | None | UndefinedType = UNDEFINED, name_by_user: str | None | UndefinedType = UNDEFINED, name: str | None | UndefinedType = UNDEFINED, new_connections: set[tuple[str, str]] | UndefinedType = UNDEFINED, @@ -1004,6 +1041,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): ("labels", labels), ("manufacturer", manufacturer), ("model", model), + ("model_id", model_id), ("name", name), ("name_by_user", name_by_user), ("serial_number", serial_number), @@ -1021,6 +1059,10 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): if not new_values: return old + if not RUNTIME_ONLY_ATTRS.issuperset(new_values): + # Change modified_at if we are changing something that we store + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("device_registry.async_update_device") new = attr.evolve(old, **new_values) self.devices[device_id] = new @@ -1100,6 +1142,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): self.deleted_devices[device_id] = DeletedDeviceEntry( config_entries=device.config_entries, connections=device.connections, + created_at=device.created_at, identifiers=device.identifiers, id=device.id, orphaned_timestamp=None, @@ -1135,6 +1178,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): tuple(conn) # type: ignore[misc] for conn in device["connections"] }, + created_at=datetime.fromisoformat(device["created_at"]), disabled_by=( DeviceEntryDisabler(device["disabled_by"]) if device["disabled_by"] @@ -1154,6 +1198,8 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): labels=set(device["labels"]), manufacturer=device["manufacturer"], model=device["model"], + model_id=device["model_id"], + modified_at=datetime.fromisoformat(device["modified_at"]), name_by_user=device["name_by_user"], name=device["name"], primary_config_entry=device["primary_config_entry"], @@ -1166,8 +1212,10 @@ class 
DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]): deleted_devices[device["id"]] = DeletedDeviceEntry( config_entries=set(device["config_entries"]), connections={tuple(conn) for conn in device["connections"]}, + created_at=datetime.fromisoformat(device["created_at"]), identifiers={tuple(iden) for iden in device["identifiers"]}, id=device["id"], + modified_at=datetime.fromisoformat(device["modified_at"]), orphaned_timestamp=device["orphaned_timestamp"], ) diff --git a/homeassistant/helpers/entity.py b/homeassistant/helpers/entity.py index cf910a5cba8..dbc1a036ef6 100644 --- a/homeassistant/helpers/entity.py +++ b/homeassistant/helpers/entity.py @@ -263,8 +263,6 @@ class CalculatedState: attributes: dict[str, Any] # Capability attributes returned by the capability_attributes property capability_attributes: Mapping[str, Any] | None - # Attributes which may be overridden by the entity registry - shadowed_attributes: Mapping[str, Any] class CachedProperties(type): @@ -1042,18 +1040,20 @@ class Entity( @callback def _async_calculate_state(self) -> CalculatedState: """Calculate state string and attribute mapping.""" - return CalculatedState(*self.__async_calculate_state()) + state, attr, capabilities, _, _ = self.__async_calculate_state() + return CalculatedState(state, attr, capabilities) def __async_calculate_state( self, - ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, Mapping[str, Any]]: + ) -> tuple[str, dict[str, Any], Mapping[str, Any] | None, str | None, int | None]: """Calculate state string and attribute mapping. - Returns a tuple (state, attr, capability_attr, shadowed_attr). + Returns a tuple: state - the stringified state attr - the attribute dictionary capability_attr - a mapping with capability attributes - shadowed_attr - a mapping with attributes which may be overridden + original_device_class - the device class which may be overridden + supported_features - the supported features This method is called when writing the state to avoid the overhead of creating a dataclass object. 
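# The registry hunks above (area, category, device; entity and floor registries follow
# below) all bump their storage minor version the same way: new created_at/modified_at
# fields are backfilled with the Unix epoch so records written before the bump stay
# loadable. A minimal sketch of that migration step, using stdlib datetime instead of
# homeassistant.util.dt and a simplified store layout (the function name and the
# "areas" key are assumptions for illustration only):
from datetime import datetime, timezone


def migrate_minor(old_minor_version: int, old_data: dict) -> dict:
    """Backfill timestamps on records stored before the minor-version bump."""
    if old_minor_version < 7:  # e.g. area registry 1.7 adds the timestamps
        epoch = datetime.fromtimestamp(0, timezone.utc).isoformat()
        for area in old_data["areas"]:
            area["created_at"] = area["modified_at"] = epoch
    return old_data


old = {"areas": [{"id": "kitchen", "name": "Kitchen"}]}
assert migrate_minor(6, old)["areas"][0]["created_at"].startswith("1970-01-01")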
@@ -1062,7 +1062,6 @@ class Entity( capability_attr = self.capability_attributes attr = capability_attr.copy() if capability_attr else {} - shadowed_attr = {} available = self.available # only call self.available once per update cycle state = self._stringify_state(available) @@ -1081,30 +1080,27 @@ class Entity( if (attribution := self.attribution) is not None: attr[ATTR_ATTRIBUTION] = attribution - shadowed_attr[ATTR_DEVICE_CLASS] = self.device_class + original_device_class = self.device_class if ( - device_class := (entry and entry.device_class) - or shadowed_attr[ATTR_DEVICE_CLASS] + device_class := (entry and entry.device_class) or original_device_class ) is not None: attr[ATTR_DEVICE_CLASS] = str(device_class) if (entity_picture := self.entity_picture) is not None: attr[ATTR_ENTITY_PICTURE] = entity_picture - shadowed_attr[ATTR_ICON] = self.icon - if (icon := (entry and entry.icon) or shadowed_attr[ATTR_ICON]) is not None: + if (icon := (entry and entry.icon) or self.icon) is not None: attr[ATTR_ICON] = icon - shadowed_attr[ATTR_FRIENDLY_NAME] = self._friendly_name_internal() if ( - name := (entry and entry.name) or shadowed_attr[ATTR_FRIENDLY_NAME] + name := (entry and entry.name) or self._friendly_name_internal() ) is not None: attr[ATTR_FRIENDLY_NAME] = name if (supported_features := self.supported_features) is not None: attr[ATTR_SUPPORTED_FEATURES] = supported_features - return (state, attr, capability_attr, shadowed_attr) + return (state, attr, capability_attr, original_device_class, supported_features) @callback def _async_write_ha_state(self) -> None: @@ -1130,14 +1126,15 @@ class Entity( return state_calculate_start = timer() - state, attr, capabilities, shadowed_attr = self.__async_calculate_state() + state, attr, capabilities, original_device_class, supported_features = ( + self.__async_calculate_state() + ) time_now = timer() if entry: # Make sure capabilities in the entity registry are up to date. Capabilities # include capability attributes, device class and supported features - original_device_class: str | None = shadowed_attr[ATTR_DEVICE_CLASS] - supported_features: int = attr.get(ATTR_SUPPORTED_FEATURES) or 0 + supported_features = supported_features or 0 if ( capabilities != entry.capabilities or original_device_class != entry.original_device_class @@ -1188,11 +1185,18 @@ class Entity( report_issue, ) - # Overwrite properties that have been set in the config file. - if (customize := hass.data.get(DATA_CUSTOMIZE)) and ( - custom := customize.get(entity_id) - ): - attr.update(custom) + try: + # Most of the time this will already be + # set and since try is near zero cost + # on py3.11+ its faster to assume it is + # set and catch the exception if it is not. + customize = hass.data[DATA_CUSTOMIZE] + except KeyError: + pass + else: + # Overwrite properties that have been set in the config file. 
+ if custom := customize.get(entity_id): + attr.update(custom) if ( self._context_set is not None @@ -1202,7 +1206,7 @@ class Entity( self._context_set = None try: - hass.states.async_set( + hass.states.async_set_internal( entity_id, state, attr, diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index d868e582f8f..6774780f00f 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -985,7 +985,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType | None, + schema: VolDictType | VolSchemaType, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, diff --git a/homeassistant/helpers/entity_registry.py b/homeassistant/helpers/entity_registry.py index dabe2e61917..5d17c0c46b1 100644 --- a/homeassistant/helpers/entity_registry.py +++ b/homeassistant/helpers/entity_registry.py @@ -48,6 +48,7 @@ from homeassistant.core import ( from homeassistant.exceptions import MaxLengthExceeded from homeassistant.loader import async_suggest_report_issue from homeassistant.util import slugify, uuid as uuid_util +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey from homeassistant.util.json import format_unserializable_data @@ -74,7 +75,7 @@ EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = Event _LOGGER = logging.getLogger(__name__) STORAGE_VERSION_MAJOR = 1 -STORAGE_VERSION_MINOR = 14 +STORAGE_VERSION_MINOR = 15 STORAGE_KEY = "core.entity_registry" CLEANUP_INTERVAL = 3600 * 24 @@ -174,6 +175,7 @@ class RegistryEntry: categories: dict[str, str] = attr.ib(factory=dict) capabilities: Mapping[str, Any] | None = attr.ib(default=None) config_entry_id: str | None = attr.ib(default=None) + created_at: datetime = attr.ib(factory=utcnow) device_class: str | None = attr.ib(default=None) device_id: str | None = attr.ib(default=None) domain: str = attr.ib(init=False, repr=False) @@ -187,6 +189,7 @@ class RegistryEntry: ) has_entity_name: bool = attr.ib(default=False) labels: set[str] = attr.ib(factory=set) + modified_at: datetime = attr.ib(factory=utcnow) name: str | None = attr.ib(default=None) options: ReadOnlyEntityOptionsType = attr.ib( default=None, converter=_protect_entity_options @@ -271,6 +274,7 @@ class RegistryEntry: "area_id": self.area_id, "categories": self.categories, "config_entry_id": self.config_entry_id, + "created_at": self.created_at.timestamp(), "device_id": self.device_id, "disabled_by": self.disabled_by, "entity_category": self.entity_category, @@ -280,6 +284,7 @@ class RegistryEntry: "icon": self.icon, "id": self.id, "labels": list(self.labels), + "modified_at": self.modified_at.timestamp(), "name": self.name, "options": self.options, "original_name": self.original_name, @@ -330,6 +335,7 @@ class RegistryEntry: "categories": self.categories, "capabilities": self.capabilities, "config_entry_id": self.config_entry_id, + "created_at": self.created_at.isoformat(), "device_class": self.device_class, "device_id": self.device_id, "disabled_by": self.disabled_by, @@ -340,6 +346,7 @@ class RegistryEntry: "id": self.id, "has_entity_name": self.has_entity_name, "labels": list(self.labels), + "modified_at": self.modified_at.isoformat(), "name": self.name, "options": self.options, "original_device_class": self.original_device_class, @@ 
-395,6 +402,8 @@ class DeletedRegistryEntry: domain: str = attr.ib(init=False, repr=False) id: str = attr.ib() orphaned_timestamp: float | None = attr.ib() + created_at: datetime = attr.ib(factory=utcnow) + modified_at: datetime = attr.ib(factory=utcnow) @domain.default def _domain_default(self) -> str: @@ -408,8 +417,10 @@ class DeletedRegistryEntry: json_bytes( { "config_entry_id": self.config_entry_id, + "created_at": self.created_at.isoformat(), "entity_id": self.entity_id, "id": self.id, + "modified_at": self.modified_at.isoformat(), "orphaned_timestamp": self.orphaned_timestamp, "platform": self.platform, "unique_id": self.unique_id, @@ -429,88 +440,97 @@ class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]): ) -> dict: """Migrate to the new version.""" data = old_data - if old_major_version == 1 and old_minor_version < 2: - # Version 1.2 implements migration and freezes the available keys - for entity in data["entities"]: - # Populate keys which were introduced before version 1.2 - entity.setdefault("area_id", None) - entity.setdefault("capabilities", {}) - entity.setdefault("config_entry_id", None) - entity.setdefault("device_class", None) - entity.setdefault("device_id", None) - entity.setdefault("disabled_by", None) - entity.setdefault("entity_category", None) - entity.setdefault("icon", None) - entity.setdefault("name", None) - entity.setdefault("original_icon", None) - entity.setdefault("original_name", None) - entity.setdefault("supported_features", 0) - entity.setdefault("unit_of_measurement", None) + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and freezes the available keys + for entity in data["entities"]: + # Populate keys which were introduced before version 1.2 + entity.setdefault("area_id", None) + entity.setdefault("capabilities", {}) + entity.setdefault("config_entry_id", None) + entity.setdefault("device_class", None) + entity.setdefault("device_id", None) + entity.setdefault("disabled_by", None) + entity.setdefault("entity_category", None) + entity.setdefault("icon", None) + entity.setdefault("name", None) + entity.setdefault("original_icon", None) + entity.setdefault("original_name", None) + entity.setdefault("supported_features", 0) + entity.setdefault("unit_of_measurement", None) - if old_major_version == 1 and old_minor_version < 3: - # Version 1.3 adds original_device_class - for entity in data["entities"]: - # Move device_class to original_device_class - entity["original_device_class"] = entity["device_class"] - entity["device_class"] = None + if old_minor_version < 3: + # Version 1.3 adds original_device_class + for entity in data["entities"]: + # Move device_class to original_device_class + entity["original_device_class"] = entity["device_class"] + entity["device_class"] = None - if old_major_version == 1 and old_minor_version < 4: - # Version 1.4 adds id - for entity in data["entities"]: - entity["id"] = uuid_util.random_uuid_hex() + if old_minor_version < 4: + # Version 1.4 adds id + for entity in data["entities"]: + entity["id"] = uuid_util.random_uuid_hex() - if old_major_version == 1 and old_minor_version < 5: - # Version 1.5 adds entity options - for entity in data["entities"]: - entity["options"] = {} + if old_minor_version < 5: + # Version 1.5 adds entity options + for entity in data["entities"]: + entity["options"] = {} - if old_major_version == 1 and old_minor_version < 6: - # Version 1.6 adds hidden_by - for entity in data["entities"]: - entity["hidden_by"] = None + if 
old_minor_version < 6: + # Version 1.6 adds hidden_by + for entity in data["entities"]: + entity["hidden_by"] = None - if old_major_version == 1 and old_minor_version < 7: - # Version 1.7 adds has_entity_name - for entity in data["entities"]: - entity["has_entity_name"] = False + if old_minor_version < 7: + # Version 1.7 adds has_entity_name + for entity in data["entities"]: + entity["has_entity_name"] = False - if old_major_version == 1 and old_minor_version < 8: - # Cleanup after frontend bug which incorrectly updated device_class - # Fixed by frontend PR #13551 - for entity in data["entities"]: - domain = split_entity_id(entity["entity_id"])[0] - if domain in [Platform.BINARY_SENSOR, Platform.COVER]: - continue - entity["device_class"] = None + if old_minor_version < 8: + # Cleanup after frontend bug which incorrectly updated device_class + # Fixed by frontend PR #13551 + for entity in data["entities"]: + domain = split_entity_id(entity["entity_id"])[0] + if domain in [Platform.BINARY_SENSOR, Platform.COVER]: + continue + entity["device_class"] = None - if old_major_version == 1 and old_minor_version < 9: - # Version 1.9 adds translation_key - for entity in data["entities"]: - entity["translation_key"] = None + if old_minor_version < 9: + # Version 1.9 adds translation_key + for entity in data["entities"]: + entity["translation_key"] = None - if old_major_version == 1 and old_minor_version < 10: - # Version 1.10 adds aliases - for entity in data["entities"]: - entity["aliases"] = [] + if old_minor_version < 10: + # Version 1.10 adds aliases + for entity in data["entities"]: + entity["aliases"] = [] - if old_major_version == 1 and old_minor_version < 11: - # Version 1.11 adds deleted_entities - data["deleted_entities"] = data.get("deleted_entities", []) + if old_minor_version < 11: + # Version 1.11 adds deleted_entities + data["deleted_entities"] = data.get("deleted_entities", []) - if old_major_version == 1 and old_minor_version < 12: - # Version 1.12 adds previous_unique_id - for entity in data["entities"]: - entity["previous_unique_id"] = None + if old_minor_version < 12: + # Version 1.12 adds previous_unique_id + for entity in data["entities"]: + entity["previous_unique_id"] = None - if old_major_version == 1 and old_minor_version < 13: - # Version 1.13 adds labels - for entity in data["entities"]: - entity["labels"] = [] + if old_minor_version < 13: + # Version 1.13 adds labels + for entity in data["entities"]: + entity["labels"] = [] - if old_major_version == 1 and old_minor_version < 14: - # Version 1.14 adds categories - for entity in data["entities"]: - entity["categories"] = {} + if old_minor_version < 14: + # Version 1.14 adds categories + for entity in data["entities"]: + entity["categories"] = {} + + if old_minor_version < 15: + # Version 1.15 adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for entity in data["entities"]: + entity["created_at"] = entity["modified_at"] = created_at + for entity in data["deleted_entities"]: + entity["created_at"] = entity["modified_at"] = created_at if old_major_version > 1: raise NotImplementedError @@ -837,10 +857,12 @@ class EntityRegistry(BaseRegistry): ) entity_registry_id: str | None = None + created_at = utcnow() deleted_entity = self.deleted_entities.pop((domain, platform, unique_id), None) if deleted_entity is not None: # Restore id entity_registry_id = deleted_entity.id + created_at = deleted_entity.created_at entity_id = self.async_generate_entity_id( domain, @@ -865,6 +887,7 @@ class 
EntityRegistry(BaseRegistry): entry = RegistryEntry( capabilities=none_if_undefined(capabilities), config_entry_id=none_if_undefined(config_entry_id), + created_at=created_at, device_id=none_if_undefined(device_id), disabled_by=disabled_by, entity_category=none_if_undefined(entity_category), @@ -906,6 +929,7 @@ class EntityRegistry(BaseRegistry): orphaned_timestamp = None if config_entry_id else time.time() self.deleted_entities[key] = DeletedRegistryEntry( config_entry_id=config_entry_id, + created_at=entity.created_at, entity_id=entity_id, id=entity.id, orphaned_timestamp=orphaned_timestamp, @@ -1093,6 +1117,8 @@ class EntityRegistry(BaseRegistry): if not new_values: return old + new_values["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("entity_registry.async_update_entity") new = self.entities[entity_id] = attr.evolve(old, **new_values) @@ -1260,6 +1286,7 @@ class EntityRegistry(BaseRegistry): categories=entity["categories"], capabilities=entity["capabilities"], config_entry_id=entity["config_entry_id"], + created_at=datetime.fromisoformat(entity["created_at"]), device_class=entity["device_class"], device_id=entity["device_id"], disabled_by=RegistryEntryDisabler(entity["disabled_by"]) @@ -1276,6 +1303,7 @@ class EntityRegistry(BaseRegistry): id=entity["id"], has_entity_name=entity["has_entity_name"], labels=set(entity["labels"]), + modified_at=datetime.fromisoformat(entity["modified_at"]), name=entity["name"], options=entity["options"], original_device_class=entity["original_device_class"], @@ -1307,8 +1335,10 @@ class EntityRegistry(BaseRegistry): ) deleted_entities[key] = DeletedRegistryEntry( config_entry_id=entity["config_entry_id"], + created_at=datetime.fromisoformat(entity["created_at"]), entity_id=entity["entity_id"], id=entity["id"], + modified_at=datetime.fromisoformat(entity["modified_at"]), orphaned_timestamp=entity["orphaned_timestamp"], platform=entity["platform"], unique_id=entity["unique_id"], diff --git a/homeassistant/helpers/entityfilter.py b/homeassistant/helpers/entityfilter.py index 24b65cba82a..1eaa0fb1404 100644 --- a/homeassistant/helpers/entityfilter.py +++ b/homeassistant/helpers/entityfilter.py @@ -4,7 +4,8 @@ from __future__ import annotations from collections.abc import Callable import fnmatch -from functools import lru_cache +from functools import lru_cache, partial +import operator import re import voluptuous as vol @@ -195,7 +196,7 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 1 - No filter # - All entities included if not have_include and not have_exclude: - return lambda entity_id: True + return bool # Case 2 - Only includes # - Entity listed in entities include: include @@ -280,4 +281,4 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 6 - No Domain and/or glob includes or excludes # - Entity listed in entities include: include # - Otherwise: exclude - return lambda entity_id: entity_id in include_e + return partial(operator.contains, include_e) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 0c77809079e..207dd024b6a 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -328,6 +328,16 @@ def async_track_state_change_event( return _async_track_state_change_event(hass, entity_ids, action, job_type) +@callback +def _async_dispatch_entity_id_event_soon( + hass: HomeAssistant, + callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]], + event: Event[_StateEventDataT], +) -> None: + """Dispatch to listeners soon to ensure one event loop runs 
before dispatch.""" + hass.loop.call_soon(_async_dispatch_entity_id_event, hass, callbacks, event) + + @callback def _async_dispatch_entity_id_event( hass: HomeAssistant, @@ -361,7 +371,7 @@ def _async_state_filter( _KEYED_TRACK_STATE_CHANGE = _KeyedEventTracker( key=_TRACK_STATE_CHANGE_DATA, event_type=EVENT_STATE_CHANGED, - dispatcher_callable=_async_dispatch_entity_id_event, + dispatcher_callable=_async_dispatch_entity_id_event_soon, filter_callable=_async_state_filter, ) diff --git a/homeassistant/helpers/floor_registry.py b/homeassistant/helpers/floor_registry.py index 9bf8a2a5d26..f14edef293a 100644 --- a/homeassistant/helpers/floor_registry.py +++ b/homeassistant/helpers/floor_registry.py @@ -5,10 +5,12 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -28,6 +30,7 @@ EVENT_FLOOR_REGISTRY_UPDATED: EventType[EventFloorRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.floor_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _FloorStoreData(TypedDict): @@ -38,6 +41,8 @@ class _FloorStoreData(TypedDict): icon: str | None level: int | None name: str + created_at: str + modified_at: str class FloorRegistryStoreData(TypedDict): @@ -66,6 +71,29 @@ class FloorEntry(NormalizedNameBaseRegistryEntry): level: int | None = None +class FloorRegistryStore(Store[FloorRegistryStoreData]): + """Store floor registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + old_minor_version: int, + old_data: dict[str, list[dict[str, Any]]], + ) -> FloorRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for floor in old_data["floors"]: + floor["created_at"] = floor["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): """Class to hold a registry of floors.""" @@ -75,11 +103,12 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the floor registry.""" self.hass = hass - self._store = Store( + self._store = FloorRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -175,7 +204,7 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): ) -> FloorEntry: """Update name of the floor.""" old = self.floors[floor_id] - changes = { + changes: dict[str, Any] = { attr_name: value for attr_name, value in ( ("aliases", aliases), @@ -191,8 +220,10 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("floor_registry.async_update") - new = self.floors[floor_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.floors[floor_id] = dataclasses.replace(old, **changes) 
self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -220,6 +251,8 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): name=floor["name"], level=floor["level"], normalized_name=normalized_name, + created_at=datetime.fromisoformat(floor["created_at"]), + modified_at=datetime.fromisoformat(floor["modified_at"]), ) self.floors = floors @@ -236,6 +269,8 @@ class FloorRegistry(BaseRegistry[FloorRegistryStoreData]): "icon": entry.icon, "level": entry.level, "name": entry.name, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.floors.values() ] diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index 1bf78ae3a29..be9b57bf814 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -7,7 +7,7 @@ import asyncio from collections.abc import Callable, Collection, Coroutine, Iterable import dataclasses from dataclasses import dataclass, field -from enum import Enum, auto +from enum import Enum, StrEnum, auto from functools import cached_property from itertools import groupby import logging @@ -54,6 +54,8 @@ INTENT_DECREASE_TIMER = "HassDecreaseTimer" INTENT_PAUSE_TIMER = "HassPauseTimer" INTENT_UNPAUSE_TIMER = "HassUnpauseTimer" INTENT_TIMER_STATUS = "HassTimerStatus" +INTENT_GET_CURRENT_DATE = "HassGetCurrentDate" +INTENT_GET_CURRENT_TIME = "HassGetCurrentTime" SLOT_SCHEMA = vol.Schema({}, extra=vol.ALLOW_EXTRA) @@ -818,6 +820,7 @@ class DynamicServiceIntentHandler(IntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create Service Intent Handler.""" self.intent_type = intent_type @@ -827,6 +830,7 @@ class DynamicServiceIntentHandler(IntentHandler): self.required_states = required_states self.description = description self.platforms = platforms + self.device_classes = device_classes self.required_slots: _IntentSlotsType = {} if required_slots: @@ -849,13 +853,38 @@ class DynamicServiceIntentHandler(IntentHandler): @cached_property def slot_schema(self) -> dict: """Return a slot schema.""" + domain_validator = ( + vol.In(list(self.required_domains)) if self.required_domains else cv.string + ) slot_schema = { vol.Any("name", "area", "floor"): non_empty_string, - vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("preferred_area_id"): cv.string, - vol.Optional("preferred_floor_id"): cv.string, + vol.Optional("domain"): vol.All(cv.ensure_list, [domain_validator]), } + if self.device_classes: + # The typical way to match enums is with vol.Coerce, but we build a + # flat list to make the API simpler to describe programmatically + flattened_device_classes = vol.In( + [ + device_class.value + for device_class_enum in self.device_classes + for device_class in device_class_enum + ] + ) + slot_schema.update( + { + vol.Optional("device_class"): vol.All( + cv.ensure_list, + [flattened_device_classes], + ) + } + ) + + slot_schema.update( + { + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, + } + ) if self.required_slots: slot_schema.update( @@ -908,9 +937,6 @@ class DynamicServiceIntentHandler(IntentHandler): if "domain" in slots: domains = set(slots["domain"]["value"]) - if self.required_domains: - # Must be a subset of intent's required domain(s) - domains.intersection_update(self.required_domains) 
if "device_class" in slots: device_classes = set(slots["device_class"]["value"]) @@ -1118,6 +1144,7 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create service handler.""" super().__init__( @@ -1130,6 +1157,7 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states=required_states, description=description, platforms=platforms, + device_classes=device_classes, ) self.domain = domain self.service = service diff --git a/homeassistant/helpers/json.py b/homeassistant/helpers/json.py index 28b3d509a0c..1145d785ed3 100644 --- a/homeassistant/helpers/json.py +++ b/homeassistant/helpers/json.py @@ -13,13 +13,39 @@ import orjson from homeassistant.util.file import write_utf8_file, write_utf8_file_atomic from homeassistant.util.json import ( # noqa: F401 - JSON_DECODE_EXCEPTIONS, - JSON_ENCODE_EXCEPTIONS, + JSON_DECODE_EXCEPTIONS as _JSON_DECODE_EXCEPTIONS, + JSON_ENCODE_EXCEPTIONS as _JSON_ENCODE_EXCEPTIONS, SerializationError, format_unserializable_data, - json_loads, + json_loads as _json_loads, ) +from .deprecation import ( + DeprecatedConstant, + all_with_deprecated_constants, + check_if_deprecated_constant, + deprecated_function, + dir_with_deprecated_constants, +) + +_DEPRECATED_JSON_DECODE_EXCEPTIONS = DeprecatedConstant( + _JSON_DECODE_EXCEPTIONS, "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", "2025.8" +) +_DEPRECATED_JSON_ENCODE_EXCEPTIONS = DeprecatedConstant( + _JSON_ENCODE_EXCEPTIONS, "homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", "2025.8" +) +json_loads = deprecated_function( + "homeassistant.util.json.json_loads", breaks_in_ha_version="2025.8" +)(_json_loads) + +# These can be removed if no deprecated constant are in this module anymore +__getattr__ = partial(check_if_deprecated_constant, module_globals=globals()) +__dir__ = partial( + dir_with_deprecated_constants, module_globals_keys=[*globals().keys()] +) +__all__ = all_with_deprecated_constants(globals()) + + _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/helpers/label_registry.py b/homeassistant/helpers/label_registry.py index 64e884e1428..1007b17bc5d 100644 --- a/homeassistant/helpers/label_registry.py +++ b/homeassistant/helpers/label_registry.py @@ -5,10 +5,12 @@ from __future__ import annotations from collections.abc import Iterable import dataclasses from dataclasses import dataclass -from typing import Literal, TypedDict +from datetime import datetime +from typing import Any, Literal, TypedDict from homeassistant.core import Event, HomeAssistant, callback from homeassistant.util import slugify +from homeassistant.util.dt import utc_from_timestamp, utcnow from homeassistant.util.event_type import EventType from homeassistant.util.hass_dict import HassKey @@ -28,6 +30,7 @@ EVENT_LABEL_REGISTRY_UPDATED: EventType[EventLabelRegistryUpdatedData] = EventTy ) STORAGE_KEY = "core.label_registry" STORAGE_VERSION_MAJOR = 1 +STORAGE_VERSION_MINOR = 2 class _LabelStoreData(TypedDict): @@ -38,6 +41,8 @@ class _LabelStoreData(TypedDict): icon: str | None label_id: str name: str + created_at: str + modified_at: str class LabelRegistryStoreData(TypedDict): @@ -66,6 +71,29 @@ class LabelEntry(NormalizedNameBaseRegistryEntry): icon: str | None = None +class LabelRegistryStore(Store[LabelRegistryStoreData]): + """Store label registry data.""" + + async def _async_migrate_func( + self, + old_major_version: int, + 
old_minor_version: int, + old_data: dict[str, list[dict[str, Any]]], + ) -> LabelRegistryStoreData: + """Migrate to the new version.""" + if old_major_version > STORAGE_VERSION_MAJOR: + raise ValueError("Can't migrate to future version") + + if old_major_version == 1: + if old_minor_version < 2: + # Version 1.2 implements migration and adds created_at and modified_at + created_at = utc_from_timestamp(0).isoformat() + for label in old_data["labels"]: + label["created_at"] = label["modified_at"] = created_at + + return old_data # type: ignore[return-value] + + class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): """Class to hold a registry of labels.""" @@ -75,11 +103,12 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): def __init__(self, hass: HomeAssistant) -> None: """Initialize the label registry.""" self.hass = hass - self._store = Store( + self._store = LabelRegistryStore( hass, STORAGE_VERSION_MAJOR, STORAGE_KEY, atomic_writes=True, + minor_version=STORAGE_VERSION_MINOR, ) @callback @@ -175,7 +204,7 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): ) -> LabelEntry: """Update name of label.""" old = self.labels[label_id] - changes = { + changes: dict[str, Any] = { attr_name: value for attr_name, value in ( ("color", color), @@ -192,8 +221,10 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): if not changes: return old + changes["modified_at"] = utcnow() + self.hass.verify_event_loop_thread("label_registry.async_update") - new = self.labels[label_id] = dataclasses.replace(old, **changes) # type: ignore[arg-type] + new = self.labels[label_id] = dataclasses.replace(old, **changes) self.async_schedule_save() self.hass.bus.async_fire_internal( @@ -221,6 +252,8 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): label_id=label["label_id"], name=label["name"], normalized_name=normalized_name, + created_at=datetime.fromisoformat(label["created_at"]), + modified_at=datetime.fromisoformat(label["modified_at"]), ) self.labels = labels @@ -237,6 +270,8 @@ class LabelRegistry(BaseRegistry[LabelRegistryStoreData]): "icon": entry.icon, "label_id": entry.label_id, "name": entry.name, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), } for entry in self.labels.values() ] diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index f386fb3ddec..e37aa0c532d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -167,7 +167,7 @@ class APIInstance: async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType: """Call a LLM tool, validate args and return the response.""" async_conversation_trace_append( - ConversationTraceEventType.LLM_TOOL_CALL, + ConversationTraceEventType.TOOL_CALL, {"tool_name": tool_input.tool_name, "tool_args": tool_input.tool_args}, ) @@ -277,6 +277,8 @@ class AssistAPI(API): intent.INTENT_GET_STATE, intent.INTENT_NEVERMIND, intent.INTENT_TOGGLE, + intent.INTENT_GET_CURRENT_DATE, + intent.INTENT_GET_CURRENT_TIME, } def __init__(self, hass: HomeAssistant) -> None: @@ -322,8 +324,7 @@ class AssistAPI(API): ( "When controlling Home Assistant always call the intent tools. " "Use HassTurnOn to lock and HassTurnOff to unlock a lock. " - "When controlling a device, prefer passing just its name and its domain " - "(what comes before the dot in its entity id). " + "When controlling a device, prefer passing just name and domain. " "When controlling an area, prefer passing just area name and domain." 
) ] @@ -361,7 +362,7 @@ class AssistAPI(API): prompt.append( "An overview of the areas and the devices in this smart home:" ) - prompt.append(yaml.dump(exposed_entities)) + prompt.append(yaml.dump(list(exposed_entities.values()))) return "\n".join(prompt) @@ -475,6 +476,7 @@ def _get_exposed_entities( info: dict[str, Any] = { "names": ", ".join(names), + "domain": state.domain, "state": state.state, } @@ -519,7 +521,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return convert(cv.CONDITIONS_SCHEMA) if isinstance(schema, selector.ConstantSelector): - return {"enum": [schema.config["value"]]} + return convert(vol.Schema(schema.config["value"])) result: dict[str, Any] if isinstance(schema, selector.ColorTempSelector): @@ -571,7 +573,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return result if isinstance(schema, selector.ObjectSelector): - return {"type": "object"} + return {"type": "object", "additionalProperties": True} if isinstance(schema, selector.SelectSelector): options = [ @@ -675,6 +677,19 @@ class ScriptTool(Tool): self.parameters = vol.Schema(schema) + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if self.description: + self.description = ( + self.description + ". Aliases: " + str(list(aliases)) + ) + else: + self.description = "Aliases: " + str(list(aliases)) + parameters_cache[entity_entry.unique_id] = ( self.description, self.parameters, diff --git a/homeassistant/helpers/normalized_name_base_registry.py b/homeassistant/helpers/normalized_name_base_registry.py index 1cffac9ffc5..7e7ca9ed884 100644 --- a/homeassistant/helpers/normalized_name_base_registry.py +++ b/homeassistant/helpers/normalized_name_base_registry.py @@ -1,8 +1,11 @@ """Provide a base class for registries that use a normalized name index.""" -from dataclasses import dataclass +from dataclasses import dataclass, field +from datetime import datetime from functools import lru_cache +from homeassistant.util import dt as dt_util + from .registry import BaseRegistryItems @@ -12,6 +15,8 @@ class NormalizedNameBaseRegistryEntry: name: str normalized_name: str + created_at: datetime = field(default_factory=dt_util.utcnow) + modified_at: datetime = field(default_factory=dt_util.utcnow) @lru_cache(maxsize=1024) diff --git a/homeassistant/helpers/recorder.py b/homeassistant/helpers/recorder.py index 6155fc9b320..59604944eeb 100644 --- a/homeassistant/helpers/recorder.py +++ b/homeassistant/helpers/recorder.py @@ -3,13 +3,25 @@ from __future__ import annotations import asyncio +from collections.abc import Callable, Generator +from contextlib import contextmanager from dataclasses import dataclass, field -from typing import Any +import functools +import logging +from typing import TYPE_CHECKING, Any from homeassistant.core import HomeAssistant, callback from homeassistant.util.hass_dict import HassKey +if TYPE_CHECKING: + from sqlalchemy.orm.session import Session + + from homeassistant.components.recorder import Recorder + +_LOGGER = logging.getLogger(__name__) + DOMAIN: HassKey[RecorderData] = HassKey("recorder") +DATA_INSTANCE: HassKey[Recorder] = HassKey("recorder_instance") @dataclass(slots=True) @@ -20,20 +32,32 @@ class RecorderData: db_connected: asyncio.Future[bool] = field(default_factory=asyncio.Future) +@callback def async_migration_in_progress(hass: HomeAssistant) -> bool: """Check to see if a recorder migration is in progress.""" - if "recorder" not in 
hass.config.components: - return False # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder return recorder.util.async_migration_in_progress(hass) +@callback +def async_migration_is_live(hass: HomeAssistant) -> bool: + """Check to see if a recorder migration is live.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.components import recorder + + return recorder.util.async_migration_is_live(hass) + + @callback def async_initialize_recorder(hass: HomeAssistant) -> None: """Initialize recorder data.""" + # pylint: disable-next=import-outside-toplevel + from homeassistant.components.recorder.basic_websocket_api import async_setup + hass.data[DOMAIN] = RecorderData() + async_setup(hass) async def async_wait_recorder(hass: HomeAssistant) -> bool: @@ -44,3 +68,45 @@ async def async_wait_recorder(hass: HomeAssistant) -> bool: if DOMAIN not in hass.data: return False return await hass.data[DOMAIN].db_connected + + +@functools.lru_cache(maxsize=1) +def get_instance(hass: HomeAssistant) -> Recorder: + """Get the recorder instance.""" + return hass.data[DATA_INSTANCE] + + +@contextmanager +def session_scope( + *, + hass: HomeAssistant | None = None, + session: Session | None = None, + exception_filter: Callable[[Exception], bool] | None = None, + read_only: bool = False, +) -> Generator[Session]: + """Provide a transactional scope around a series of operations. + + read_only is used to indicate that the session is only used for reading + data and that no commit is required. It does not prevent the session + from writing and is not a security measure. + """ + if session is None and hass is not None: + session = get_instance(hass).get_session() + + if session is None: + raise RuntimeError("Session required") + + need_rollback = False + try: + yield session + if not read_only and session.get_transaction(): + need_rollback = True + session.commit() + except Exception as err: + _LOGGER.exception("Error executing query") + if need_rollback: + session.rollback() + if not exception_filter or not exception_filter(err): + raise + finally: + session.close() diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index 84dabb114cd..a1b885d0c52 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Mapping, Sequence +from collections.abc import AsyncGenerator, Callable, Mapping, Sequence from contextlib import asynccontextmanager from contextvars import ContextVar from copy import copy @@ -13,10 +13,9 @@ from functools import cached_property, partial import itertools import logging from types import MappingProxyType -from typing import Any, Literal, TypedDict, cast +from typing import Any, Literal, TypedDict, cast, overload import async_interrupt -from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant import exceptions @@ -76,6 +75,7 @@ from homeassistant.core import ( HassJob, HomeAssistant, ServiceResponse, + State, SupportsResponse, callback, ) @@ -108,9 +108,7 @@ from .trace import ( trace_update_result, ) from .trigger import async_initialize_triggers, async_validate_trigger_config -from .typing import UNDEFINED, ConfigType, UndefinedType - -# mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs +from .typing import UNDEFINED, ConfigType, TemplateVarsType, UndefinedType SCRIPT_MODE_PARALLEL = "parallel" SCRIPT_MODE_QUEUED = "queued" @@ -178,7 
+176,7 @@ def _set_result_unless_done(future: asyncio.Future[None]) -> None: future.set_result(None) -def action_trace_append(variables, path): +def action_trace_append(variables: dict[str, Any], path: str) -> TraceElement: """Append a TraceElement to trace[path].""" trace_element = TraceElement(variables, path) trace_append_element(trace_element, ACTION_TRACE_NODE_MAX_LEN) @@ -431,7 +429,7 @@ class _ScriptRun: if not self._stop.done(): self._script._changed() # noqa: SLF001 - async def _async_get_condition(self, config): + async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: return await self._script._async_get_condition(config) # noqa: SLF001 def _log( @@ -439,7 +437,7 @@ class _ScriptRun: ) -> None: self._script._log(msg, *args, level=level, **kwargs) # noqa: SLF001 - def _step_log(self, default_message, timeout=None): + def _step_log(self, default_message: str, timeout: float | None = None) -> None: self._script.last_action = self._action.get(CONF_ALIAS, default_message) _timeout = ( "" if timeout is None else f" (timeout: {timedelta(seconds=timeout)})" @@ -581,7 +579,7 @@ class _ScriptRun: if not isinstance(exception, exceptions.HomeAssistantError): raise exception - def _log_exception(self, exception): + def _log_exception(self, exception: Exception) -> None: action_type = cv.determine_script_action(self._action) error = str(exception) @@ -630,7 +628,7 @@ class _ScriptRun: ) raise _AbortScript from ex - async def _async_delay_step(self): + async def _async_delay_step(self) -> None: """Handle delay.""" delay_delta = self._get_pos_time_period_template(CONF_DELAY) @@ -662,7 +660,7 @@ class _ScriptRun: return self._get_pos_time_period_template(CONF_TIMEOUT).total_seconds() return None - async def _async_wait_template_step(self): + async def _async_wait_template_step(self) -> None: """Handle a wait template.""" timeout = self._get_timeout_seconds_from_action() self._step_log("wait template", timeout) @@ -691,7 +689,9 @@ class _ScriptRun: futures.append(done) @callback - def async_script_wait(entity_id, from_s, to_s): + def async_script_wait( + entity_id: str, from_s: State | None, to_s: State | None + ) -> None: """Handle script after template condition is true.""" self._async_set_remaining_time_var(timeout_handle) self._variables["wait"]["completed"] = True @@ -728,7 +728,7 @@ class _ScriptRun: except ScriptStoppedError as ex: raise asyncio.CancelledError from ex - async def _async_call_service_step(self): + async def _async_call_service_step(self) -> None: """Call the service specified in the action.""" self._step_log("call service") @@ -775,14 +775,14 @@ class _ScriptRun: if response_variable: self._variables[response_variable] = response_data - async def _async_device_step(self): + async def _async_device_step(self) -> None: """Perform the device automation specified in the action.""" self._step_log("device automation") await device_action.async_call_action_from_config( self._hass, self._action, self._variables, self._context ) - async def _async_scene_step(self): + async def _async_scene_step(self) -> None: """Activate the scene specified in the action.""" self._step_log("activate scene") trace_set_result(scene=self._action[CONF_SCENE]) @@ -794,7 +794,7 @@ class _ScriptRun: context=self._context, ) - async def _async_event_step(self): + async def _async_event_step(self) -> None: """Fire an event.""" self._step_log(self._action.get(CONF_ALIAS, self._action[CONF_EVENT])) event_data = {} @@ -816,7 +816,7 @@ class _ScriptRun: self._action[CONF_EVENT], 
event_data, context=self._context ) - async def _async_condition_step(self): + async def _async_condition_step(self) -> None: """Test if condition is matching.""" self._script.last_action = self._action.get( CONF_ALIAS, self._action[CONF_CONDITION] @@ -836,12 +836,19 @@ class _ScriptRun: if not check: raise _ConditionFail - def _test_conditions(self, conditions, name, condition_path=None): + def _test_conditions( + self, + conditions: list[ConditionCheckerType], + name: str, + condition_path: str | None = None, + ) -> bool | None: if condition_path is None: condition_path = name @trace_condition_function - def traced_test_conditions(hass, variables): + def traced_test_conditions( + hass: HomeAssistant, variables: TemplateVarsType + ) -> bool | None: try: with trace_path(condition_path): for idx, cond in enumerate(conditions): @@ -857,7 +864,7 @@ class _ScriptRun: return traced_test_conditions(self._hass, self._variables) @async_trace_path("repeat") - async def _async_repeat_step(self): # noqa: C901 + async def _async_repeat_step(self) -> None: # noqa: C901 """Repeat a sequence.""" description = self._action.get(CONF_ALIAS, "sequence") repeat = self._action[CONF_REPEAT] @@ -877,7 +884,7 @@ class _ScriptRun: script = self._script._get_repeat_script(self._step) # noqa: SLF001 warned_too_many_loops = False - async def async_run_sequence(iteration, extra_msg=""): + async def async_run_sequence(iteration: int, extra_msg: str = "") -> None: self._log("Repeating %s: Iteration %i%s", description, iteration, extra_msg) with trace_path("sequence"): await self._async_run_script(script) @@ -1053,7 +1060,7 @@ class _ScriptRun: """If sequence.""" if_data = await self._script._async_get_if_data(self._step) # noqa: SLF001 - test_conditions = False + test_conditions: bool | None = False try: with trace_path("if"): test_conditions = self._test_conditions( @@ -1073,6 +1080,26 @@ class _ScriptRun: with trace_path("else"): await self._async_run_script(if_data["if_else"]) + @overload + def _async_futures_with_timeout( + self, + timeout: float, + ) -> tuple[ + list[asyncio.Future[None]], + asyncio.TimerHandle, + asyncio.Future[None], + ]: ... + + @overload + def _async_futures_with_timeout( + self, + timeout: None, + ) -> tuple[ + list[asyncio.Future[None]], + None, + None, + ]: ... 
+ def _async_futures_with_timeout( self, timeout: float | None, @@ -1099,7 +1126,7 @@ class _ScriptRun: futures.append(timeout_future) return futures, timeout_handle, timeout_future - async def _async_wait_for_trigger_step(self): + async def _async_wait_for_trigger_step(self) -> None: """Wait for a trigger event.""" timeout = self._get_timeout_seconds_from_action() @@ -1120,12 +1147,14 @@ class _ScriptRun: done = self._hass.loop.create_future() futures.append(done) - async def async_done(variables, context=None): + async def async_done( + variables: dict[str, Any], context: Context | None = None + ) -> None: self._async_set_remaining_time_var(timeout_handle) self._variables["wait"]["trigger"] = variables["trigger"] _set_result_unless_done(done) - def log_cb(level, msg, **kwargs): + def log_cb(level: int, msg: str, **kwargs: Any) -> None: self._log(msg, level=level, **kwargs) remove_triggers = await async_initialize_triggers( @@ -1169,14 +1198,14 @@ class _ScriptRun: unsub() - async def _async_variables_step(self): + async def _async_variables_step(self) -> None: """Set a variable value.""" self._step_log("setting variables") self._variables = self._action[CONF_VARIABLES].async_render( self._hass, self._variables, render_as_defaults=False ) - async def _async_set_conversation_response_step(self): + async def _async_set_conversation_response_step(self) -> None: """Set conversation response.""" self._step_log("setting conversation response") resp: template.Template | None = self._action[CONF_SET_CONVERSATION_RESPONSE] @@ -1188,7 +1217,7 @@ class _ScriptRun: ) trace_set_result(conversation_response=self._conversation_response) - async def _async_stop_step(self): + async def _async_stop_step(self) -> None: """Stop script execution.""" stop = self._action[CONF_STOP] error = self._action.get(CONF_ERROR, False) @@ -1321,7 +1350,7 @@ async def _async_stop_scripts_at_shutdown(hass: HomeAssistant, event: Event) -> ) -type _VarsType = dict[str, Any] | MappingProxyType +type _VarsType = dict[str, Any] | MappingProxyType[str, Any] def _referenced_extract_ids(data: Any, key: str, found: set[str]) -> None: @@ -1359,7 +1388,7 @@ class ScriptRunResult: conversation_response: str | None | UndefinedType service_response: ServiceResponse - variables: dict + variables: dict[str, Any] class Script: @@ -1414,7 +1443,7 @@ class Script: self._set_logger(logger) self._log_exceptions = log_exceptions - self.last_action = None + self.last_action: str | None = None self.last_triggered: datetime | None = None self._runs: list[_ScriptRun] = [] @@ -1422,7 +1451,7 @@ class Script: self._max_exceeded = max_exceeded if script_mode == SCRIPT_MODE_QUEUED: self._queue_lck = asyncio.Lock() - self._config_cache: dict[set[tuple], Callable[..., bool]] = {} + self._config_cache: dict[frozenset[tuple[str, str]], ConditionCheckerType] = {} self._repeat_script: dict[int, Script] = {} self._choose_data: dict[int, _ChooseData] = {} self._if_data: dict[int, _IfData] = {} @@ -1715,9 +1744,11 @@ class Script: variables["context"] = context elif self._copy_variables_on_run: - variables = cast(dict, copy(run_variables)) + # This is not the top level script, variables have been turned to a dict + variables = cast(dict[str, Any], copy(run_variables)) else: - variables = cast(dict, run_variables) + # This is not the top level script, variables have been turned to a dict + variables = cast(dict[str, Any], run_variables) # Prevent non-allowed recursive calls which will cause deadlocks when we try to # stop (restart) or wait for (queued) our own 
script run. @@ -1746,9 +1777,7 @@ class Script: cls = _ScriptRun else: cls = _QueuedScriptRun - run = cls( - self._hass, self, cast(dict, variables), context, self._log_exceptions - ) + run = cls(self._hass, self, variables, context, self._log_exceptions) has_existing_runs = bool(self._runs) self._runs.append(run) if self.script_mode == SCRIPT_MODE_RESTART and has_existing_runs: @@ -1773,7 +1802,9 @@ class Script: self._changed() raise - async def _async_stop(self, aws: list[asyncio.Task], update_state: bool) -> None: + async def _async_stop( + self, aws: list[asyncio.Task[None]], update_state: bool + ) -> None: await asyncio.wait(aws) if update_state: self._changed() @@ -1792,11 +1823,8 @@ class Script: return await asyncio.shield(create_eager_task(self._async_stop(aws, update_state))) - async def _async_get_condition(self, config): - if isinstance(config, template.Template): - config_cache_key = config.template - else: - config_cache_key = frozenset((k, str(v)) for k, v in config.items()) + async def _async_get_condition(self, config: ConfigType) -> ConditionCheckerType: + config_cache_key = frozenset((k, str(v)) for k, v in config.items()) if not (cond := self._config_cache.get(config_cache_key)): cond = await condition.async_from_config(self._hass, config) self._config_cache[config_cache_key] = cond diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 5a542657d10..025b8de8896 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -725,6 +725,7 @@ class DurationSelectorConfig(TypedDict, total=False): """Class to represent a duration selector config.""" enable_day: bool + enable_millisecond: bool allow_negative: bool @@ -739,6 +740,8 @@ class DurationSelector(Selector[DurationSelectorConfig]): # Enable day field in frontend. A selection with `days` set is allowed # even if `enable_day` is not set vol.Optional("enable_day"): cv.boolean, + # Enable millisecond field in frontend. + vol.Optional("enable_millisecond"): cv.boolean, # Allow negative durations. Will default to False in HA Core 2025.6.0. 
vol.Optional("allow_negative"): cv.boolean, } diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 35c682437cb..58cd4657301 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -20,8 +20,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FLOOR_ID, ATTR_LABEL_ID, + CONF_ACTION, CONF_ENTITY_ID, - CONF_SERVICE, CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE, CONF_SERVICE_TEMPLATE, @@ -358,8 +358,8 @@ def async_prepare_call_from_config( f"Invalid config for calling service: {ex}" ) from ex - if CONF_SERVICE in config: - domain_service = config[CONF_SERVICE] + if CONF_ACTION in config: + domain_service = config[CONF_ACTION] else: domain_service = config[CONF_SERVICE_TEMPLATE] diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index cc619e25aed..7742418c5a7 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -6,11 +6,11 @@ from ast import literal_eval import asyncio import base64 import collections.abc -from collections.abc import Callable, Iterable +from collections.abc import Callable, Generator, Iterable from contextlib import AbstractContextManager from contextvars import ContextVar from datetime import date, datetime, time, timedelta -from functools import cache, lru_cache, partial, wraps +from functools import cache, cached_property, lru_cache, partial, wraps import json import logging import math @@ -34,7 +34,6 @@ from jinja2.sandbox import ImmutableSandboxedEnvironment from jinja2.utils import Namespace from lru import LRU import orjson -from typing_extensions import Generator import voluptuous as vol from homeassistant.const import ( @@ -1023,7 +1022,7 @@ class TemplateStateBase(State): return self.state_with_unit raise KeyError - @property + @cached_property def entity_id(self) -> str: # type: ignore[override] """Wrap State.entity_id. 
@@ -2328,7 +2327,7 @@ def regex_match(value, find="", ignorecase=False): """Match value using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).match(value)) @@ -2339,7 +2338,7 @@ def regex_replace(value="", find="", replace="", ignorecase=False): """Replace using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).sub(replace, value) @@ -2347,7 +2346,7 @@ def regex_search(value, find="", ignorecase=False): """Search using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return bool(_regex_cache(find, flags).search(value)) @@ -2360,7 +2359,7 @@ def regex_findall(value, find="", ignorecase=False): """Find all matches using regex.""" if not isinstance(value, str): value = str(value) - flags = re.I if ignorecase else 0 + flags = re.IGNORECASE if ignorecase else 0 return _regex_cache(find, flags).findall(value) diff --git a/homeassistant/helpers/trace.py b/homeassistant/helpers/trace.py index 6f29ff23bec..431a7a7d1f8 100644 --- a/homeassistant/helpers/trace.py +++ b/homeassistant/helpers/trace.py @@ -3,14 +3,12 @@ from __future__ import annotations from collections import deque -from collections.abc import Callable, Coroutine +from collections.abc import Callable, Coroutine, Generator from contextlib import contextmanager from contextvars import ContextVar from functools import wraps from typing import Any -from typing_extensions import Generator - from homeassistant.core import ServiceResponse import homeassistant.util.dt as dt_util @@ -36,7 +34,7 @@ class TraceElement: """Container for trace data.""" self._child_key: str | None = None self._child_run_id: str | None = None - self._error: Exception | None = None + self._error: BaseException | None = None self.path: str = path self._result: dict[str, Any] | None = None self.reuse_by_child = False @@ -54,7 +52,7 @@ class TraceElement: self._child_key = child_key self._child_run_id = child_run_id - def set_error(self, ex: Exception) -> None: + def set_error(self, ex: BaseException | None) -> None: """Set error.""" self._error = ex diff --git a/homeassistant/helpers/update_coordinator.py b/homeassistant/helpers/update_coordinator.py index 8451c69d2b3..4fe4953d752 100644 --- a/homeassistant/helpers/update_coordinator.py +++ b/homeassistant/helpers/update_coordinator.py @@ -4,8 +4,9 @@ from __future__ import annotations from abc import abstractmethod import asyncio -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import Awaitable, Callable, Coroutine, Generator from datetime import datetime, timedelta +from functools import cached_property import logging from random import randint from time import monotonic @@ -14,7 +15,7 @@ import urllib.error import aiohttp import requests -from typing_extensions import Generator, TypeVar +from typing_extensions import TypeVar from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -70,6 +71,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): name: str, update_interval: timedelta | None = None, update_method: Callable[[], Awaitable[_DataT]] | None = None, + setup_method: Callable[[], Awaitable[None]] | None = None, request_refresh_debouncer: Debouncer[Coroutine[Any, 
Any, None]] | None = None, always_update: bool = True, ) -> None: @@ -78,6 +80,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger = logger self.name = name self.update_method = update_method + self.setup_method = setup_method self._update_interval_seconds: float | None = None self.update_interval = update_interval self._shutdown_requested = False @@ -274,15 +277,54 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): fails. Additionally logging is handled by config entry setup to ensure that multiple retries do not cause log spam. """ - await self._async_refresh( - log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True - ) - if self.last_update_success: - return + if await self.__wrap_async_setup(): + await self._async_refresh( + log_failures=False, raise_on_auth_failed=True, raise_on_entry_error=True + ) + if self.last_update_success: + return ex = ConfigEntryNotReady() ex.__cause__ = self.last_exception raise ex + async def __wrap_async_setup(self) -> bool: + """Error handling for _async_setup.""" + try: + await self._async_setup() + except ( + TimeoutError, + requests.exceptions.Timeout, + aiohttp.ClientError, + requests.exceptions.RequestException, + urllib.error.URLError, + UpdateFailed, + ) as err: + self.last_exception = err + + except (ConfigEntryError, ConfigEntryAuthFailed) as err: + self.last_exception = err + self.last_update_success = False + raise + + except Exception as err: # pylint: disable=broad-except + self.last_exception = err + self.logger.exception("Unexpected error fetching %s data", self.name) + else: + return True + + self.last_update_success = False + return False + + async def _async_setup(self) -> None: + """Set up the coordinator. + + Can be overwritten by integrations to load data or resources + only once during the first refresh. + """ + if self.setup_method is None: + return None + return await self.setup_method() + async def async_refresh(self) -> None: """Refresh data and log errors.""" await self._async_refresh(log_failures=True) @@ -392,7 +434,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]): self.logger.debug( "Finished fetching %s data in %.3f seconds (success: %s)", self.name, - monotonic() - start, + monotonic() - start, # pylint: disable=possibly-used-before-assignment self.last_update_success, ) if not auth_failed and self._listeners and not self.hass.is_stopping: @@ -471,7 +513,7 @@ class BaseCoordinatorEntity[ self.coordinator = coordinator self.coordinator_context = context - @property + @cached_property def should_poll(self) -> bool: """No need to poll. 
Coordinator notifies entity of updates.""" return False diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 9acc1682602..90b88ba2109 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -945,7 +945,7 @@ class Integration: except IntegrationNotFound as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: we are unable to resolve" + "Unable to resolve dependencies for %s: unable to resolve" " (sub)dependency %s" ), self.domain, @@ -954,7 +954,7 @@ class Integration: except CircularDependency as err: _LOGGER.error( ( - "Unable to resolve dependencies for %s: it contains a circular" + "Unable to resolve dependencies for %s: it contains a circular" " dependency: %s -> %s" ), self.domain, diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index fcf79258c25..472134fea37 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -3,51 +3,51 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 -aiohttp-fast-url-dispatcher==0.3.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.9.5 +aiohttp==3.10.1 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.1.2 -async-upnp-client==0.39.0 +async-upnp-client==0.40.0 atomicwrites-homeassistant==1.4.1 attrs==23.2.0 -awesomeversion==24.2.0 -bcrypt==4.1.2 +awesomeversion==24.6.0 +bcrypt==4.1.3 bleak-retry-connector==3.5.0 bleak==0.22.2 bluetooth-adapters==0.19.3 bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 -cryptography==42.0.8 +cryptography==43.0.0 dbus-fast==2.22.1 fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.2.0 habluetooth==3.1.3 hass-nabucasa==0.81.1 -hassil==1.7.1 +hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240710.0 -home-assistant-intents==2024.7.3 +home-assistant-frontend==20240806.1 +home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.9.15 +orjson==3.10.6 packaging>=23.1 paho-mqtt==1.6.1 -Pillow==10.3.0 +Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 PyJWT==2.8.0 +pymicro-vad==1.0.1 PyNaCl==1.5.0 -pyOpenSSL==24.1.0 +pyOpenSSL==24.2.1 pyserial==3.5 python-slugify==8.0.4 PyTurboJPEG==1.7.1 @@ -56,12 +56,11 @@ PyYAML==6.0.1 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.9.0 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 -voluptuous-openapi==0.0.4 +voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 -voluptuous==0.13.1 -webrtc-noise-gain==1.2.3 +voluptuous==0.15.2 yarl==1.9.4 zeroconf==0.132.2 @@ -187,8 +186,8 @@ dacite>=1.7.0 # Musle wheels for pandas 2.2.0 cannot be build for any architecture. 
pandas==2.1.4 -# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x -chacha20poly1305-reuseable>=0.12.1 +# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x +chacha20poly1305-reuseable>=0.13.0 # pycountry<23.12.11 imports setuptools at run time # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 9775a3fee45..102c48e1d07 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from collections import defaultdict -from collections.abc import Awaitable, Callable, Mapping +from collections.abc import Awaitable, Callable, Generator, Mapping import contextlib import contextvars from enum import StrEnum @@ -14,8 +14,6 @@ import time from types import ModuleType from typing import Any, Final, TypedDict -from typing_extensions import Generator - from . import config as conf_util, core, loader, requirements from .const import ( BASE_PLATFORMS, # noqa: F401 @@ -31,7 +29,7 @@ from .core import ( callback, ) from .exceptions import DependencyError, HomeAssistantError -from .helpers import singleton, translation +from .helpers import issue_registry as ir, singleton, translation from .helpers.issue_registry import IssueSeverity, async_create_issue from .helpers.typing import ConfigType from .util.async_ import create_eager_task @@ -283,6 +281,20 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") + if not hass.config.safe_mode: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"integration_not_found.{domain}", + is_fixable=True, + issue_domain=HOMEASSISTANT_DOMAIN, + severity=IssueSeverity.ERROR, + translation_key="integration_not_found", + translation_placeholders={ + "domain": domain, + }, + data={"domain": domain}, + ) return False log_error = partial(_log_error_setup_error, hass, domain, integration) diff --git a/homeassistant/util/__init__.py b/homeassistant/util/__init__.py index c9aa2817640..c2d825a1676 100644 --- a/homeassistant/util/__init__.py +++ b/homeassistant/util/__init__.py @@ -129,13 +129,11 @@ class Throttle: async def throttled_value() -> None: """Stand-in function for when real func is being throttled.""" - return None else: def throttled_value() -> None: # type: ignore[misc] """Stand-in function for when real func is being throttled.""" - return None if self.limit_no_throttle is not None: method = Throttle(self.limit_no_throttle)(method) diff --git a/homeassistant/util/color.py b/homeassistant/util/color.py index ab5c4037f9b..0745bc96dfb 100644 --- a/homeassistant/util/color.py +++ b/homeassistant/util/color.py @@ -244,7 +244,7 @@ def color_RGB_to_xy_brightness( y = Y / (X + Y + Z) # Brightness - Y = 1 if Y > 1 else Y + Y = min(Y, 1) brightness = round(Y * 255) # Check if the given xy value is within the color-reach of the lamp. 
diff --git a/homeassistant/util/json.py b/homeassistant/util/json.py index 1479550b615..fa67f6b1dcc 100644 --- a/homeassistant/util/json.py +++ b/homeassistant/util/json.py @@ -2,8 +2,6 @@ from __future__ import annotations -from collections.abc import Callable -import json import logging from os import PathLike from typing import Any @@ -12,8 +10,6 @@ import orjson from homeassistant.exceptions import HomeAssistantError -from .file import WriteError # noqa: F401 - _SENTINEL = object() _LOGGER = logging.getLogger(__name__) @@ -129,63 +125,9 @@ def load_json_object( raise HomeAssistantError(f"Expected JSON to be parsed as a dict got {type(value)}") -def save_json( - filename: str, - data: list | dict, - private: bool = False, - *, - encoder: type[json.JSONEncoder] | None = None, - atomic_writes: bool = False, -) -> None: - """Save JSON data to a file.""" - # pylint: disable-next=import-outside-toplevel - from homeassistant.helpers.frame import report - - report( - ( - "uses save_json from homeassistant.util.json module." - " This is deprecated and will stop working in Home Assistant 2022.4, it" - " should be updated to use homeassistant.helpers.json module instead" - ), - error_if_core=False, - ) - - # pylint: disable-next=import-outside-toplevel - import homeassistant.helpers.json as json_helper - - json_helper.save_json( - filename, data, private, encoder=encoder, atomic_writes=atomic_writes - ) - - def format_unserializable_data(data: dict[str, Any]) -> str: """Format output of find_paths in a friendly way. Format is comma separated: =() """ return ", ".join(f"{path}={value}({type(value)}" for path, value in data.items()) - - -def find_paths_unserializable_data( - bad_data: Any, *, dump: Callable[[Any], str] = json.dumps -) -> dict[str, Any]: - """Find the paths to unserializable data. - - This method is slow! Only use for error handling. - """ - # pylint: disable-next=import-outside-toplevel - from homeassistant.helpers.frame import report - - report( - ( - "uses find_paths_unserializable_data from homeassistant.util.json module." 
- " This is deprecated and will stop working in Home Assistant 2022.4, it" - " should be updated to use homeassistant.helpers.json module instead" - ), - error_if_core=False, - ) - - # pylint: disable-next=import-outside-toplevel - import homeassistant.helpers.json as json_helper - - return json_helper.find_paths_unserializable_data(bad_data, dump=dump) diff --git a/homeassistant/util/location.py b/homeassistant/util/location.py index 24c49c5427c..c00cf88699e 100644 --- a/homeassistant/util/location.py +++ b/homeassistant/util/location.py @@ -163,7 +163,8 @@ async def _get_whoami(session: aiohttp.ClientSession) -> dict[str, Any] | None: """Query whoami.home-assistant.io for location data.""" try: resp = await session.get( - WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, timeout=30 + WHOAMI_URL_DEV if HA_VERSION.endswith("0.dev0") else WHOAMI_URL, + timeout=aiohttp.ClientTimeout(total=30), ) except (aiohttp.ClientError, TimeoutError): return None diff --git a/homeassistant/util/timeout.py b/homeassistant/util/timeout.py index 72cabffeed6..821f502694b 100644 --- a/homeassistant/util/timeout.py +++ b/homeassistant/util/timeout.py @@ -61,18 +61,16 @@ class _GlobalFreezeContext: def _enter(self) -> None: """Run freeze.""" - if not self._manager.freezes_done: - return + if self._manager.freezes_done: + # Global reset + for task in self._manager.global_tasks: + task.pause() - # Global reset - for task in self._manager.global_tasks: - task.pause() - - # Zones reset - for zone in self._manager.zones.values(): - if not zone.freezes_done: - continue - zone.pause() + # Zones reset + for zone in self._manager.zones.values(): + if not zone.freezes_done: + continue + zone.pause() self._manager.global_freezes.append(self) diff --git a/homeassistant/util/ulid.py b/homeassistant/util/ulid.py index 65f1b8226c0..f4895f9d963 100644 --- a/homeassistant/util/ulid.py +++ b/homeassistant/util/ulid.py @@ -4,10 +4,12 @@ from __future__ import annotations from ulid_transform import ( bytes_to_ulid, + bytes_to_ulid_or_none, ulid_at_time, ulid_hex, ulid_now, ulid_to_bytes, + ulid_to_bytes_or_none, ) __all__ = [ @@ -17,6 +19,8 @@ __all__ = [ "ulid_to_bytes", "bytes_to_ulid", "ulid_now", + "ulid_to_bytes_or_none", + "bytes_to_ulid_or_none", ] diff --git a/homeassistant/util/yaml/objects.py b/homeassistant/util/yaml/objects.py index d35ba11d25e..7e4019331c6 100644 --- a/homeassistant/util/yaml/objects.py +++ b/homeassistant/util/yaml/objects.py @@ -29,7 +29,7 @@ class NodeStrClass(str): def __voluptuous_compile__(self, schema: vol.Schema) -> Any: """Needed because vol.Schema.compile does not handle str subclasses.""" - return _compile_scalar(self) + return _compile_scalar(self) # type: ignore[no-untyped-call] class NodeDictClass(dict): diff --git a/mypy.ini b/mypy.ini index d94e5a37194..0f4f8907612 100644 --- a/mypy.ini +++ b/mypy.ini @@ -85,6 +85,9 @@ disallow_any_generics = true [mypy-homeassistant.helpers.reload] disallow_any_generics = true +[mypy-homeassistant.helpers.script] +disallow_any_generics = true + [mypy-homeassistant.helpers.script_variables] disallow_any_generics = true @@ -732,6 +735,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.autarco.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.auth.*] 
check_untyped_defs = true disallow_incomplete_defs = true @@ -942,6 +955,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bryant_evolution.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.bthome.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1413,6 +1436,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.elevenlabs.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.elgato.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2293,6 +2326,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.iotty.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ipp.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2533,6 +2576,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.linkplay.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.litejet.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2633,6 +2686,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.madvr.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.mailbox.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -3583,6 +3646,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.script.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.search.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index 67eea59bc9a..2c58e7aae15 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -79,7 +79,7 @@ _INNER_MATCH_POSSIBILITIES = [i + 1 for i in range(5)] _TYPE_HINT_MATCHERS.update( { f"x_of_y_{i}": re.compile( - rf"^(\w+)\[{_INNER_MATCH}" + f", {_INNER_MATCH}" * (i - 1) + r"\]$" + rf"^([\w\.]+)\[{_INNER_MATCH}" + f", {_INNER_MATCH}" * (i - 1) + r"\]$" ) for i in _INNER_MATCH_POSSIBILITIES } @@ -100,8 +100,9 @@ _TEST_FIXTURES: dict[str, 
list[str] | str] = { "aiohttp_client": "ClientSessionGenerator", "aiohttp_server": "Callable[[], TestServer]", "area_registry": "AreaRegistry", - "async_setup_recorder_instance": "RecorderInstanceGenerator", + "async_test_recorder": "RecorderInstanceGenerator", "caplog": "pytest.LogCaptureFixture", + "capsys": "pytest.CaptureFixture[str]", "current_request_with_host": "None", "device_registry": "DeviceRegistry", "enable_bluetooth": "None", @@ -145,6 +146,7 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "mock_tts_get_cache_files": "MagicMock", "mock_tts_init_cache_dir": "MagicMock", "mock_zeroconf": "MagicMock", + "monkeypatch": "pytest.MonkeyPatch", "mqtt_client_mock": "MqttMockPahoClient", "mqtt_mock": "MqttMockHAClient", "mqtt_mock_entry": "MqttMockHAClientGenerator", diff --git a/pylint/plugins/hass_imports.py b/pylint/plugins/hass_imports.py index b4d30be483d..57b71560b53 100644 --- a/pylint/plugins/hass_imports.py +++ b/pylint/plugins/hass_imports.py @@ -360,6 +360,12 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^RESULT_TYPE_(\w*)$"), ), ], + "homeassistant.helpers.config_validation": [ + ObsoleteImportMatch( + reason="should be imported from homeassistant/components/", + constant=re.compile(r"^PLATFORM_SCHEMA(_BASE)?$"), + ), + ], "homeassistant.helpers.device_registry": [ ObsoleteImportMatch( reason="replaced by DeviceEntryDisabler enum", @@ -386,12 +392,6 @@ _OBSOLETE_IMPORT: dict[str, list[ObsoleteImportMatch]] = { constant=re.compile(r"^IMPERIAL_SYSTEM$"), ), ], - "homeassistant.util.json": [ - ObsoleteImportMatch( - reason="moved to homeassistant.helpers.json", - constant=re.compile(r"^save_json|find_paths_unserializable_data$"), - ), - ], } diff --git a/pyproject.toml b/pyproject.toml index 55f96c3e0b9..dc943b0832a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.4" +version = "2024.8.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" @@ -24,17 +24,16 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.9.5", + "aiohttp==3.10.1", "aiohttp_cors==0.7.0", - "aiohttp-fast-url-dispatcher==0.3.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", "astral==2.2", "async-interrupt==1.1.2", "attrs==23.2.0", "atomicwrites-homeassistant==1.4.1", - "awesomeversion==24.2.0", - "bcrypt==4.1.2", + "awesomeversion==24.6.0", + "bcrypt==4.1.3", "certifi>=2021.5.30", "ciso8601==2.3.1", "fnv-hash-fast==0.5.0", @@ -50,10 +49,10 @@ dependencies = [ "lru-dict==1.3.0", "PyJWT==2.8.0", # PyJWT has loose dependency. We want the latest one. 
- "cryptography==42.0.8", - "Pillow==10.3.0", - "pyOpenSSL==24.1.0", - "orjson==3.9.15", + "cryptography==43.0.0", + "Pillow==10.4.0", + "pyOpenSSL==24.2.1", + "orjson==3.10.6", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", @@ -62,14 +61,14 @@ dependencies = [ "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==0.9.0", + "ulid-transform==0.13.1", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 "urllib3>=1.26.5,<2", - "voluptuous==0.13.1", + "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", - "voluptuous-openapi==0.0.4", + "voluptuous-openapi==0.0.5", "yarl==1.9.4", ] @@ -185,6 +184,7 @@ disable = [ "bidirectional-unicode", # PLE2502 "continue-in-finally", # PLE0116 "duplicate-bases", # PLE0241 + "misplaced-bare-raise", # PLE0704 "format-needs-mapping", # F502 "function-redefined", # F811 # Needed because ruff does not understand type of __all__ generated by a function @@ -457,16 +457,14 @@ filterwarnings = [ # Ignore custom pytest marks "ignore:Unknown pytest.mark.disable_autouse_fixture:pytest.PytestUnknownMarkWarning:tests.components.met", "ignore:Unknown pytest.mark.dataset:pytest.PytestUnknownMarkWarning:tests.components.screenlogic", - # https://github.com/rokam/sunweg/blob/3.0.1/sunweg/plant.py#L96 - v3.0.1 - 2024-05-29 + # https://github.com/rokam/sunweg/blob/3.0.2/sunweg/plant.py#L96 - v3.0.2 - 2024-07-10 "ignore:The '(kwh_per_kwp|performance_rate)' property is deprecated and will return 0:DeprecationWarning:tests.components.sunweg.test_init", # -- design choice 3rd party # https://github.com/gwww/elkm1/blob/2.2.7/elkm1_lib/util.py#L8-L19 "ignore:ssl.TLSVersion.TLSv1 is deprecated:DeprecationWarning:elkm1_lib.util", - # https://github.com/michaeldavie/env_canada/blob/v0.6.2/env_canada/ec_cache.py - "ignore:Inheritance class CacheClientSession from ClientSession is discouraged:DeprecationWarning:env_canada.ec_cache", # https://github.com/allenporter/ical/pull/215 - # https://github.com/allenporter/ical/blob/8.0.0/ical/util.py#L20-L22 + # https://github.com/allenporter/ical/blob/8.1.1/ical/util.py#L21-L23 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:ical.util", # https://github.com/bachya/regenmaschine/blob/2024.03.0/regenmaschine/client.py#L52 "ignore:ssl.TLSVersion.SSLv3 is deprecated:DeprecationWarning:regenmaschine.client", @@ -478,11 +476,11 @@ filterwarnings = [ "ignore:Deprecated call to `pkg_resources.declare_namespace\\(('azure'|'google.*'|'pywinusb'|'repoze'|'xbox'|'zope')\\)`:DeprecationWarning:pkg_resources", # -- tracked upstream / open PRs + # https://github.com/ronf/asyncssh/issues/674 - v2.15.0 + "ignore:ARC4 has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.ARC4 and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", + "ignore:TripleDES has been moved to cryptography.hazmat.decrepit.ciphers.algorithms.TripleDES and will be removed from this module in 48.0.0:UserWarning:asyncssh.crypto.cipher", # https://github.com/certbot/certbot/issues/9828 - v2.10.0 "ignore:X509Extension support in pyOpenSSL is deprecated. 
You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", - # https://github.com/influxdata/influxdb-client-python/issues/603 - v1.42.0 - # https://github.com/influxdata/influxdb-client-python/pull/652 - "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", # https://github.com/beetbox/mediafile/issues/67 - v0.12.0 "ignore:'imghdr' is deprecated and slated for removal in Python 3.13:DeprecationWarning:mediafile", # https://github.com/foxel/python_ndms2_client/issues/6 - v0.1.3 @@ -500,8 +498,9 @@ filterwarnings = [ "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:devialet.devialet_api", # https://github.com/httplib2/httplib2/pull/226 - >=0.21.0 "ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:httplib2", - # https://github.com/jaraco/jaraco.abode/commit/9e3e789efc96cddcaa15f920686bbeb79a7469e0 - update jaraco.abode to >=5.1.0 - "ignore:`jaraco.functools.call_aside` is deprecated, use `jaraco.functools.invoke` instead:DeprecationWarning:jaraco.abode.helpers.timeline", + # https://github.com/influxdata/influxdb-client-python/issues/603 >1.45.0 + # https://github.com/influxdata/influxdb-client-python/pull/652 + "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:influxdb_client.client.write.point", # https://github.com/majuss/lupupy/pull/15 - >0.3.2 "ignore:\"is not\" with 'str' literal. Did you mean \"!=\"?:SyntaxWarning:.*lupupy.devices.alarm", # https://github.com/nextcord/nextcord/pull/1095 - >2.6.1 @@ -522,10 +521,6 @@ filterwarnings = [ "ignore:invalid escape sequence:SyntaxWarning:.*stringcase", # https://github.com/mvantellingen/python-zeep/pull/1364 - >4.2.1 "ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:zeep.utils", - # https://github.com/timmo001/system-bridge-connector/pull/27 - >=4.1.0 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:systembridgeconnector.version", - # https://github.com/jschlyter/ttls/commit/d64f1251397b8238cf6a35bea64784de25e3386c - >=1.8.1 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:ttls", # -- fixed for Python 3.13 # https://github.com/rhasspy/wyoming/commit/e34af30d455b6f2bb9e5cfb25fad8d276914bc54 - >=1.4.2 @@ -544,7 +539,7 @@ filterwarnings = [ # https://pypi.org/project/emulated-roku/ - v0.3.0 - 2023-12-19 # https://github.com/martonperei/emulated_roku "ignore:loop argument is deprecated:DeprecationWarning:emulated_roku", - # https://github.com/thecynic/pylutron - v0.2.13 + # https://github.com/thecynic/pylutron - v0.2.15 "ignore:setDaemon\\(\\) is deprecated, set the daemon attribute instead:DeprecationWarning:pylutron", # https://github.com/pschmitt/pynuki/blob/1.6.3/pynuki/utils.py#L21 - v1.6.3 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:pynuki.utils", @@ -559,6 +554,9 @@ filterwarnings = [ # https://pypi.org/project/aprslib/ - v0.7.2 - 2022-07-10 "ignore:invalid escape sequence:SyntaxWarning:.*aprslib.parsing.common", "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:aprslib.parsing.common", + # https://pypi.org/project/panasonic-viera/ - v0.4.2 - 2024-04-24 + # https://github.com/florianholzapfel/panasonic-viera/blob/0.4.2/panasonic_viera/__init__.py#L789 + "ignore:invalid escape sequence:SyntaxWarning:.*panasonic_viera", # https://pypi.org/project/pyblackbird/ - v0.6 - 2023-03-15 # 
https://github.com/koolsb/pyblackbird/pull/9 -> closed "ignore:invalid escape sequence:SyntaxWarning:.*pyblackbird", @@ -583,9 +581,14 @@ filterwarnings = [ "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pybotvac.version", # https://github.com/home-assistant-ecosystem/python-mystrom/blob/2.2.0/pymystrom/__init__.py#L10 - v2.2.0 - 2023-05-21 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pymystrom", - # https://pypi.org/project/velbus-aio/ - v2024.4.1 - 2024-04-07 - # https://github.com/Cereal2nd/velbus-aio/blob/2024.4.1/velbusaio/handler.py#L12 + # https://pypi.org/project/velbus-aio/ - v2024.7.5 - 2024-07-05 + # https://github.com/Cereal2nd/velbus-aio/blob/2024.7.5/velbusaio/handler.py#L22 "ignore:pkg_resources is deprecated as an API:DeprecationWarning:velbusaio.handler", + # - pyOpenSSL v24.2.1 + # https://pypi.org/project/acme/ - v2.11.0 - 2024-06-06 + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:acme.crypto_util", + # https://pypi.org/project/josepy/ - v1.14.0 - 2023-11-01 + "ignore:CSR support in pyOpenSSL is deprecated. You should use the APIs in cryptography:DeprecationWarning:josepy.util", # -- Python 3.13 # HomeAssistant @@ -595,9 +598,6 @@ filterwarnings = [ # https://github.com/nextcord/nextcord/issues/1174 # https://github.com/nextcord/nextcord/blob/v2.6.1/nextcord/player.py#L5 "ignore:'audioop' is deprecated and slated for removal in Python 3.13:DeprecationWarning:nextcord.player", - # https://pypi.org/project/pylutron/ - v0.2.12 - 2024-02-12 - # https://github.com/thecynic/pylutron/issues/89 - "ignore:'telnetlib' is deprecated and slated for removal in Python 3.13:DeprecationWarning:pylutron", # https://pypi.org/project/SpeechRecognition/ - v3.10.4 - 2024-05-05 # https://github.com/Uberi/speech_recognition/blob/3.10.4/speech_recognition/__init__.py#L7 "ignore:'aifc' is deprecated and slated for removal in Python 3.13:DeprecationWarning:speech_recognition", @@ -657,10 +657,6 @@ filterwarnings = [ "ignore:\"is\" with 'int' literal. 
Did you mean \"==\"?:SyntaxWarning:.*pyiss", # https://pypi.org/project/PyMetEireann/ - v2021.8.0 - 2021-08-16 "ignore:datetime.*utcnow\\(\\) is deprecated and scheduled for removal:DeprecationWarning:meteireann", - # https://pypi.org/project/pyowm/ - v3.3.0 - 2022-02-14 - # https://github.com/csparpa/pyowm/issues/435 - # https://github.com/csparpa/pyowm/blob/3.3.0/pyowm/commons/cityidregistry.py#L7 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:pyowm.commons.cityidregistry", # https://pypi.org/project/PyPasser/ - v0.0.5 - 2021-10-21 "ignore:invalid escape sequence:SyntaxWarning:.*pypasser.utils", # https://pypi.org/project/pyqwikswitch/ - v0.94 - 2019-08-19 @@ -670,16 +666,37 @@ filterwarnings = [ "ignore:datetime.*utcfromtimestamp\\(\\) is deprecated and scheduled for removal:DeprecationWarning:rx.internal.constants", # https://pypi.org/project/rxv/ - v0.7.0 - 2021-10-10 "ignore:defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead:DeprecationWarning:rxv.ssdp", - # https://pypi.org/project/webrtcvad/ - v2.0.10 - 2017-01-08 - "ignore:pkg_resources is deprecated as an API:DeprecationWarning:webrtcvad", +] + +[tool.coverage.run] +source = ["homeassistant"] + +[tool.coverage.report] +exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", + # Don't complain about missing debug-only code: + "def __repr__", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + # TYPE_CHECKING and @overload blocks are never executed during pytest run + "if TYPE_CHECKING:", + "@overload", ] [tool.ruff] -required-version = ">=0.4.8" +required-version = ">=0.5.3" [tool.ruff.lint] select = [ "A001", # Variable {name} is shadowing a Python builtin + "ASYNC210", # Async functions should not call blocking HTTP methods + "ASYNC220", # Async functions should not create subprocesses with blocking methods + "ASYNC221", # Async functions should not run processes with blocking methods + "ASYNC222", # Async functions should not wait on processes with blocking methods + "ASYNC230", # Async functions should not open files with blocking methods like open + "ASYNC251", # Async functions should not call time.sleep "B002", # Python does not support the unary prefix increment "B005", # Using .strip() with multi-character strings is misleading "B007", # Loop control variable {name} not used within loop body @@ -701,6 +718,7 @@ select = [ "E", # pycodestyle "F", # pyflakes/autoflake "FLY", # flynt + "FURB", # refurb "G", # flake8-logging-format "I", # isort "INP", # flake8-no-pep420 @@ -722,6 +740,7 @@ select = [ "RUF006", # Store a reference to the return value of asyncio.create_task "RUF010", # Use explicit conversion flag "RUF013", # PEP 484 prohibits implicit Optional + "RUF017", # Avoid quadratic list summation "RUF018", # Avoid assignment expressions in assert statements "RUF019", # Unnecessary key check before dictionary access # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up @@ -853,6 +872,7 @@ voluptuous = "vol" "homeassistant.components.wake_word.PLATFORM_SCHEMA" = "WAKE_WORD_PLATFORM_SCHEMA" "homeassistant.components.water_heater.PLATFORM_SCHEMA" = "WATER_HEATER_PLATFORM_SCHEMA" "homeassistant.components.weather.PLATFORM_SCHEMA" = "WEATHER_PLATFORM_SCHEMA" +"homeassistant.core.DOMAIN" = "HOMEASSISTANT_DOMAIN" "homeassistant.helpers.area_registry" = "ar" "homeassistant.helpers.category_registry" = "cr" "homeassistant.helpers.config_validation" = 
"cv" diff --git a/requirements.txt b/requirements.txt index 5b1c57c7e1c..1beefe73914 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,17 +4,16 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.9.5 +aiohttp==3.10.1 aiohttp_cors==0.7.0 -aiohttp-fast-url-dispatcher==0.3.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 astral==2.2 async-interrupt==1.1.2 attrs==23.2.0 atomicwrites-homeassistant==1.4.1 -awesomeversion==24.2.0 -bcrypt==4.1.2 +awesomeversion==24.6.0 +bcrypt==4.1.3 certifi>=2021.5.30 ciso8601==2.3.1 fnv-hash-fast==0.5.0 @@ -25,10 +24,10 @@ ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 PyJWT==2.8.0 -cryptography==42.0.8 -Pillow==10.3.0 -pyOpenSSL==24.1.0 -orjson==3.9.15 +cryptography==43.0.0 +Pillow==10.4.0 +pyOpenSSL==24.2.1 +orjson==3.10.6 packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 @@ -37,9 +36,9 @@ PyYAML==6.0.1 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.9.0 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 -voluptuous==0.13.1 +voluptuous==0.15.2 voluptuous-serialize==2.6.0 -voluptuous-openapi==0.0.4 +voluptuous-openapi==0.0.5 yarl==1.9.4 diff --git a/requirements_all.txt b/requirements_all.txt index 6c7f7bbc000..b8f50d328f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.2 +AEMET-OpenData==0.5.3 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -12,11 +12,8 @@ AIOSomecomfort==0.0.25 # homeassistant.components.adax Adax-local==0.1.5 -# homeassistant.components.blinksticklight -BlinkStick==1.2.0 - # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 @@ -36,10 +33,10 @@ Mastodon.py==1.8.1 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.3.0 +Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.13 +PlexAPI==4.15.14 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -62,9 +59,6 @@ PyFronius==0.7.3 # homeassistant.components.pyload PyLoadAPI==1.3.2 -# homeassistant.components.mvglive -PyMVGLive==1.1.4 - # homeassistant.components.met_eireann PyMetEireann==2021.8.0 @@ -182,10 +176,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.6.1 # homeassistant.components.airzone -aioairzone==0.7.7 +aioairzone==0.8.1 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -195,16 +189,16 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.8 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.1.1 +aioaseko==0.2.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.4 +aioautomower==2024.7.3 # homeassistant.components.azure_devops aioazuredevops==2.1.1 @@ -213,7 +207,7 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 @@ -243,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.1 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -261,7 +255,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.1.5 +aiohomekit==3.2.1 # homeassistant.components.hue 
aiohue==4.7.2 @@ -279,7 +273,7 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.15 +aiolifx-themes==0.5.0 # homeassistant.components.lifx aiolifx==1.0.6 @@ -294,7 +288,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.5.0 +aiomealie==0.8.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -341,7 +335,7 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.3.5 +aioqsw==0.4.0 # homeassistant.components.rainforest_raven aioraven==0.7.0 @@ -355,6 +349,9 @@ aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed aioruckus==0.34 +# homeassistant.components.russound_rio +aiorussound==2.2.0 + # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -362,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.0.0 +aioshelly==11.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -410,10 +407,10 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.2 +aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.6.1 +airgradient==0.7.1 # homeassistant.components.airly airly==1.1.0 @@ -434,7 +431,7 @@ airtouch5py==0.2.10 alpha-vantage==2.3.1 # homeassistant.components.amberelectric -amberelectric==1.1.0 +amberelectric==1.1.1 # homeassistant.components.amcrest amcrest==1.9.8 @@ -490,7 +487,7 @@ asterisk_mbox==0.5.0 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.39.0 +async-upnp-client==0.40.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -513,6 +510,9 @@ auroranoaa==0.0.3 # homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.avea # avea==1.5.1 @@ -526,10 +526,10 @@ axis==62 azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==3.1.0 +azure-kusto-data[aio]==4.5.1 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==3.1.0 +azure-kusto-ingest==4.5.1 # homeassistant.components.azure_service_bus azure-servicebus==7.10.0 @@ -555,11 +555,8 @@ beautifulsoup4==4.12.3 # homeassistant.components.beewi_smartclim # beewi-smartclim==0.0.10 -# homeassistant.components.zha -bellows==0.39.1 - # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.15.3 +bimmer-connected[china]==0.16.1 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -603,7 +600,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond bond-async==0.2.1 @@ -613,10 +610,13 @@ boschshcpy==0.2.91 # homeassistant.components.amazon_polly # homeassistant.components.route53 -boto3==1.34.51 +boto3==1.34.131 + +# homeassistant.components.aws +botocore==1.34.131 # homeassistant.components.bring -bring-api==0.7.1 +bring-api==0.8.1 # homeassistant.components.broadlink broadlink==0.19.0 @@ -669,9 +669,6 @@ colorlog==6.8.2 # homeassistant.components.color_extractor colorthief==0.2.1 -# homeassistant.components.concord232 -concord232==0.15 - # homeassistant.components.upc_connect connect-box==0.3.1 @@ -709,7 +706,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # 
homeassistant.components.namecheapdns @@ -734,6 +731,9 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network devolo-plc-api==1.4.1 +# homeassistant.components.chacon_dio +dio-chacon-wifi-api==1.1.0 + # homeassistant.components.directv directv==0.4.0 @@ -743,9 +743,6 @@ discogs-client==2.3.0 # homeassistant.components.steamist discovery30303==0.3.2 -# homeassistant.components.dovado -dovado==0.4.1 - # homeassistant.components.dremel_3d_printer dremel3dpy==2.1.1 @@ -753,7 +750,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -782,6 +779,9 @@ ecoaliface==0.4.0 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 @@ -816,7 +816,7 @@ enocean==0.50 enturclient==0.2.4 # homeassistant.components.environment_canada -env-canada==0.7.1 +env-canada==0.7.2 # homeassistant.components.season ephem==4.1.5 @@ -848,6 +848,9 @@ eufylife-ble-client==0.1.8 # homeassistant.components.evohome evohome-async==0.4.20 +# homeassistant.components.bryant_evolution +evolutionhttp==0.0.18 + # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -942,7 +945,7 @@ georss-generic-client==0.8 georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.7 +georss-qld-bushfire-alert-client==0.8 # homeassistant.components.dlna_dmr # homeassistant.components.kef @@ -974,7 +977,7 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.13.11 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.12.3 +google-cloud-texttospeech==2.16.3 # homeassistant.components.google_generative_ai_conversation google-generativeai==0.6.0 @@ -992,7 +995,7 @@ goslide-api==0.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.3 +govee-ble==0.40.0 # homeassistant.components.govee_light_local govee-local-api==1.5.1 @@ -1004,7 +1007,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 @@ -1056,7 +1059,7 @@ hass-nabucasa==0.81.1 hass-splunk==0.1.1 # homeassistant.components.conversation -hassil==1.7.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -1090,13 +1093,13 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240710.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.3 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect -homeconnect==0.7.2 +homeconnect==0.8.0 # homeassistant.components.homematicip_cloud homematicip==1.1.1 @@ -1152,7 +1155,7 @@ ihcsdk==2.8.5 imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.3 +incomfort-client==0.6.3-1 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -1169,6 +1172,9 @@ insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire intellifire4py==2.2.2 +# homeassistant.components.iotty +iottycloud==0.1.3 + # homeassistant.components.iperf3 iperf3==0.1.11 @@ -1178,12 +1184,16 @@ isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 + # homeassistant.components.abode jaraco.abode==5.2.1 # homeassistant.components.jellyfin 
jellyfin-apiclient-python==1.9.2 +# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 @@ -1206,7 +1216,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.8.6.211307 # homeassistant.components.konnected konnected==1.2.0 @@ -1221,7 +1231,7 @@ lacrosse-view==1.0.1 lakeside==0.13 # homeassistant.components.laundrify -laundrify-aio==1.1.2 +laundrify-aio==1.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1317,7 +1327,7 @@ meteoalertapi==0.3.0 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1344,7 +1354,7 @@ moehlenhoff-alpha2==1.3.1 monzopy==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.7.0 +mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds motionblinds==0.6.23 @@ -1389,7 +1399,7 @@ netdata==1.1.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.2.0 +nettigo-air-monitor==3.3.0 # homeassistant.components.neurio_energy neurio==0.3.1 @@ -1398,7 +1408,7 @@ neurio==0.3.1 nexia==2.0.8 # homeassistant.components.nextcloud -nextcloudmonitor==1.5.0 +nextcloudmonitor==1.5.1 # homeassistant.components.discord nextcord==2.6.0 @@ -1407,7 +1417,7 @@ nextcord==2.6.0 nextdns==3.1.0 # homeassistant.components.nibe_heatpump -nibe==2.8.0 +nibe==2.11.0 # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1459,7 +1469,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ollama -ollama-hass==0.1.7 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1501,7 +1511,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.5.2 +opower==0.6.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1633,11 +1643,14 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 +# homeassistant.components.madvr +py-madvr2==1.6.29 + # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==1.0.2 +py-nextbusnext==2.0.3 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1654,9 +1667,6 @@ py-synologydsm-api==2.4.4 # homeassistant.components.zabbix py-zabbix==1.1.7 -# homeassistant.components.seventeentrack -py17track==2021.12.2 - # homeassistant.components.atome pyAtome==0.1.1 @@ -1670,7 +1680,7 @@ pyControl4==1.1.0 pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.3 +pyElectra==1.2.4 # homeassistant.components.emby pyEmby==1.9 @@ -1736,6 +1746,9 @@ pybbox==0.0.5-alpha # homeassistant.components.blackbird pyblackbird==0.6 +# homeassistant.components.bluesound +pyblu==0.4.0 + # homeassistant.components.neato pybotvac==0.0.25 @@ -1773,7 +1786,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.11.1 +pydaikin==2.13.1 # homeassistant.components.danfoss_air pydanfossair==0.1.0 @@ -1827,7 +1840,7 @@ pyeiscp==0.0.7 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.envisalink pyenvisalink==4.7 @@ -1893,7 +1906,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 +pyhomeworks==1.1.0 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1980,7 +1993,7 @@ pylitterbot==2023.5.0 pylutron-caseta==0.20.0 # homeassistant.components.lutron -pylutron==0.2.13 +pylutron==0.2.15 # 
homeassistant.components.mailgun pymailgunner==1.4 @@ -1997,6 +2010,9 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.1 + # homeassistant.components.xiaomi_tv pymitv==1.4.3 @@ -2015,6 +2031,9 @@ pymsteams==0.1.12 # homeassistant.components.mysensors pymysensors==0.24.0 +# homeassistant.components.iron_os +pynecil==0.2.0 + # homeassistant.components.netgear pynetgear==0.10.10 @@ -2031,7 +2050,7 @@ pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.5 +pynx584==0.8.2 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -2069,7 +2088,7 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.11 +pyoverkiz==1.13.14 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -2144,19 +2163,20 @@ pyschlage==2024.6.0 pysensibo==1.0.36 # homeassistant.components.serial -# homeassistant.components.zha -pyserial-asyncio-fast==0.11 +pyserial-asyncio-fast==0.13 # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb -# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 # homeassistant.components.sesame pysesame2==1.0.1 +# homeassistant.components.seventeentrack +pyseventeentrack==1.0.0 + # homeassistant.components.sia pysiaalarm==3.1.1 @@ -2203,7 +2223,7 @@ pystiebeleltron==0.0.1.dev2 pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.0 +pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 @@ -2257,10 +2277,10 @@ python-gc100==1.0.3a0 python-gitlab==1.6.0 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.0.0 +python-homewizard-energy==v6.1.1 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2275,13 +2295,16 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.5 +python-kasa[speedups]==0.7.1 + +# homeassistant.components.linkplay +python-linkplay==0.0.6 # homeassistant.components.lirc # python-lirc==1.2.3 # homeassistant.components.matter -python-matter-server==6.2.2 +python-matter-server==6.3.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -2324,7 +2347,7 @@ python-songpal==0.16.2 python-tado==0.17.6 # homeassistant.components.technove -python-technove==1.2.2 +python-technove==1.3.1 # homeassistant.components.telegram_bot python-telegram-bot[socks]==21.0.1 @@ -2355,10 +2378,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.10 +pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 @@ -2376,7 +2399,7 @@ pyvera==0.3.13 pyversasense==0.0.6 # homeassistant.components.vesync -pyvesync==2.1.10 +pyvesync==2.1.12 # homeassistant.components.vizio pyvizio==0.1.61 @@ -2448,19 +2471,19 @@ rapt-ble==0.1.2 raspyrfm-client==1.2.8 # homeassistant.components.refoss -refoss-ha==1.2.1 +refoss-ha==1.2.4 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.4 +renault-api==0.2.5 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.5 +reolink-aio==0.9.7 # 
homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2469,7 +2492,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.11 +ring-doorbell[listen]==0.8.12 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2501,9 +2524,6 @@ rpi-bad-power==0.1.0 # homeassistant.components.rtsp_to_webrtc rtsp-to-webrtc==0.5.1 -# homeassistant.components.russound_rio -russound-rio==1.0.0 - # homeassistant.components.russound_rnet russound==0.1.9 @@ -2565,6 +2585,9 @@ sharp_aquos_rc==0.3.2 # homeassistant.components.shodan shodan==1.28.0 +# homeassistant.components.simplefin +simplefin4py==0.0.18 + # homeassistant.components.sighthound simplehound==0.3 @@ -2671,16 +2694,16 @@ surepy==0.9.0 swisshydrodata==0.1.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.1.0 +switchbot-api==2.2.1 # homeassistant.components.synology_srm synology-srm==0.2.0 # homeassistant.components.system_bridge -systembridgeconnector==4.0.3 +systembridgeconnector==4.1.0 # homeassistant.components.system_bridge -systembridgemodels==4.0.4 +systembridgemodels==4.1.0 # homeassistant.components.tailscale tailscale==0.6.1 @@ -2709,9 +2732,10 @@ temperusb==1.6.1 # homeassistant.components.tensorflow # tensorflow==2.5.0 +# homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.6.2 +tesla-fleet-api==0.7.3 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2720,13 +2744,13 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.tensorflow # tf-models-official==2.5.0 # homeassistant.components.thermobeacon -thermobeacon-ble==0.6.2 +thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 @@ -2771,7 +2795,7 @@ transmission-rpc==7.0.3 ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.0.0 +ttn_client==1.1.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 @@ -2783,13 +2807,13 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.0.0 +twitchAPI==4.2.1 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==4.2.0 +uiprotect==5.4.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2804,7 +2828,7 @@ unifi_ap==0.0.1 unifiled==0.11 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.22 # homeassistant.components.upb upb-lib==0.5.8 @@ -2830,7 +2854,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.5 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2864,7 +2888,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.6.0 +wallbox==0.7.0 # homeassistant.components.folder_watcher watchdog==2.3.1 @@ -2878,9 +2902,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2894,7 +2915,7 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.18.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 @@ -2909,12 +2930,11 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.0.0 # homeassistant.components.knx xknxproject==3.7.1 -# homeassistant.components.bluesound 
# homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -2933,7 +2953,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.2 +yalexs==6.4.3 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2942,7 +2962,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 @@ -2951,7 +2971,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 @@ -2966,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.117 +zha==0.0.28 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 @@ -2974,21 +2994,6 @@ zhong-hong-hvac==1.0.12 # homeassistant.components.ziggo_mediabox_xl ziggo-mediabox-xl==1.1.0 -# homeassistant.components.zha -zigpy-deconz==0.23.2 - -# homeassistant.components.zha -zigpy-xbee==0.20.1 - -# homeassistant.components.zha -zigpy-zigate==0.12.1 - -# homeassistant.components.zha -zigpy-znp==0.12.2 - -# homeassistant.components.zha -zigpy==0.64.1 - # homeassistant.components.zoneminder zm-py==0.5.4 diff --git a/requirements_test.txt b/requirements_test.txt index 460da410db6..19a60b6aa28 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -7,17 +7,18 @@ -c homeassistant/package_constraints.txt -r requirements_test_pre_commit.txt -astroid==3.2.2 -coverage==7.5.3 -freezegun==1.5.0 +astroid==3.2.4 +coverage==7.6.0 +freezegun==1.5.1 mock-open==1.4.0 -mypy-dev==1.11.0a9 +mypy-dev==1.12.0a2 pre-commit==3.7.1 pydantic==1.10.17 -pylint==3.2.2 +pylint==3.2.6 pylint-per-file-ignores==1.3.2 -pipdeptree==2.19.0 -pytest-asyncio==0.23.6 +pipdeptree==2.23.1 +pip-licenses==4.5.1 +pytest-asyncio==0.23.8 pytest-aiohttp==1.0.5 pytest-cov==5.0.0 pytest-freezer==0.4.8 @@ -25,15 +26,15 @@ pytest-github-actions-annotate-failures==0.2.0 pytest-socket==0.7.0 pytest-sugar==1.0.0 pytest-timeout==2.3.1 -pytest-unordered==0.6.0 +pytest-unordered==0.6.1 pytest-picked==0.5.0 pytest-xdist==3.6.1 -pytest==8.2.0 +pytest==8.3.1 requests-mock==1.12.1 respx==0.21.1 syrupy==4.6.1 tqdm==4.66.4 -types-aiofiles==23.2.0.20240403 +types-aiofiles==23.2.0.20240623 types-atomicwrites==1.4.5.1 types-croniter==2.0.0.20240423 types-beautifulsoup4==4.12.0.20240511 @@ -41,13 +42,13 @@ types-caldav==1.3.0.20240331 types-chardet==0.1.5 types-decorator==5.1.8.20240310 types-paho-mqtt==1.6.0.20240321 -types-pillow==10.2.0.20240511 +types-pillow==10.2.0.20240520 types-protobuf==4.24.0.20240106 -types-psutil==5.9.5.20240511 +types-psutil==6.0.0.20240621 types-python-dateutil==2.9.0.20240316 types-python-slugify==8.0.2.20240310 types-pytz==2024.1.0.20240417 types-PyYAML==6.0.12.20240311 types-requests==2.31.0.3 types-xmltodict==0.13.0.3 -uv==0.2.13 +uv==0.2.27 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fc830f0fe7a..f6602bf082b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.2 +AEMET-OpenData==0.5.3 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 @@ -13,7 +13,7 @@ AIOSomecomfort==0.0.25 Adax-local==0.1.5 # homeassistant.components.doorbird -DoorBirdPy==2.1.0 +DoorBirdPy==3.0.2 # homeassistant.components.homekit HAP-python==4.9.1 @@ -21,6 +21,9 @@ HAP-python==4.9.1 # homeassistant.components.tasmota 
HATasmota==0.9.2 +# homeassistant.components.mastodon +Mastodon.py==1.8.1 + # homeassistant.components.doods # homeassistant.components.generic # homeassistant.components.image_upload @@ -30,10 +33,10 @@ HATasmota==0.9.2 # homeassistant.components.seven_segments # homeassistant.components.sighthound # homeassistant.components.tensorflow -Pillow==10.3.0 +Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.13 +PlexAPI==4.15.14 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -161,10 +164,10 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.5.3 +aioairzone-cloud==0.6.1 # homeassistant.components.airzone -aioairzone==0.7.7 +aioairzone==0.8.1 # homeassistant.components.ambient_network # homeassistant.components.ambient_station @@ -174,16 +177,16 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.8 +aioaquacell==0.2.0 # homeassistant.components.aseko_pool_live -aioaseko==0.1.1 +aioaseko==0.2.0 # homeassistant.components.asuswrt aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.4 +aioautomower==2024.7.3 # homeassistant.components.azure_devops aioazuredevops==2.1.1 @@ -192,7 +195,7 @@ aioazuredevops==2.1.1 aiobafi6==0.9.0 # homeassistant.components.aws -aiobotocore==2.13.0 +aiobotocore==2.13.1 # homeassistant.components.comelit aiocomelit==0.9.0 @@ -222,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.1 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 @@ -237,7 +240,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.1.5 +aiohomekit==3.2.1 # homeassistant.components.hue aiohue==4.7.2 @@ -252,7 +255,7 @@ aiokafka==0.10.0 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.4.15 +aiolifx-themes==0.5.0 # homeassistant.components.lifx aiolifx==1.0.6 @@ -267,7 +270,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.5.0 +aiomealie==0.8.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -314,7 +317,7 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.3.5 +aioqsw==0.4.0 # homeassistant.components.rainforest_raven aioraven==0.7.0 @@ -328,6 +331,9 @@ aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed aioruckus==0.34 +# homeassistant.components.russound_rio +aiorussound==2.2.0 + # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -335,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.0.0 +aioshelly==11.1.0 # homeassistant.components.skybell aioskybell==22.7.0 @@ -383,10 +389,10 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.2 +aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.6.1 +airgradient==0.7.1 # homeassistant.components.airly airly==1.1.0 @@ -404,7 +410,7 @@ airtouch4pyapi==1.0.5 airtouch5py==0.2.10 # homeassistant.components.amberelectric -amberelectric==1.1.0 +amberelectric==1.1.1 # homeassistant.components.androidtv androidtv[async]==0.0.73 @@ -445,7 +451,7 @@ asterisk_mbox==0.5.0 # homeassistant.components.ssdp # homeassistant.components.upnp # homeassistant.components.yeelight -async-upnp-client==0.39.0 +async-upnp-client==0.40.0 # homeassistant.components.arve asyncarve==0.1.1 @@ -459,6 +465,9 @@ auroranoaa==0.0.3 # 
homeassistant.components.aurora_abb_powerone aurorapy==0.2.7 +# homeassistant.components.autarco +autarco==2.0.0 + # homeassistant.components.axis axis==62 @@ -466,10 +475,10 @@ axis==62 azure-eventhub==5.11.1 # homeassistant.components.azure_data_explorer -azure-kusto-data[aio]==3.1.0 +azure-kusto-data[aio]==4.5.1 # homeassistant.components.azure_data_explorer -azure-kusto-ingest==3.1.0 +azure-kusto-ingest==4.5.1 # homeassistant.components.holiday babel==2.15.0 @@ -480,11 +489,8 @@ base36==0.1.1 # homeassistant.components.scrape beautifulsoup4==4.12.3 -# homeassistant.components.zha -bellows==0.39.1 - # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.15.3 +bimmer-connected[china]==0.16.1 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ -518,7 +524,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond bond-async==0.2.1 @@ -526,8 +532,11 @@ bond-async==0.2.1 # homeassistant.components.bosch_shc boschshcpy==0.2.91 +# homeassistant.components.aws +botocore==1.34.131 + # homeassistant.components.bring -bring-api==0.7.1 +bring-api==0.8.1 # homeassistant.components.broadlink broadlink==0.19.0 @@ -590,7 +599,7 @@ dbus-fast==2.22.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -615,6 +624,9 @@ devolo-home-control-api==0.18.3 # homeassistant.components.devolo_home_network devolo-plc-api==1.4.1 +# homeassistant.components.chacon_dio +dio-chacon-wifi-api==1.1.0 + # homeassistant.components.directv directv==0.4.0 @@ -628,7 +640,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 @@ -648,6 +660,9 @@ easyenergy==2.1.2 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 @@ -673,7 +688,7 @@ energyzero==2.1.1 enocean==0.50 # homeassistant.components.environment_canada -env-canada==0.7.1 +env-canada==0.7.2 # homeassistant.components.season ephem==4.1.5 @@ -699,6 +714,9 @@ eternalegypt==0.0.16 # homeassistant.components.eufylife_ble eufylife-ble-client==0.1.8 +# homeassistant.components.bryant_evolution +evolutionhttp==0.0.18 + # homeassistant.components.faa_delays faadelays==2023.9.1 @@ -764,6 +782,9 @@ gassist-text==0.0.11 # homeassistant.components.google gcal-sync==6.1.4 +# homeassistant.components.geniushub +geniushub-client==0.7.1 + # homeassistant.components.geocaching geocachingapi==0.2.1 @@ -777,7 +798,7 @@ georss-generic-client==0.8 georss-ign-sismologia-client==0.8 # homeassistant.components.qld_bushfire -georss-qld-bushfire-alert-client==0.7 +georss-qld-bushfire-alert-client==0.8 # homeassistant.components.dlna_dmr # homeassistant.components.kef @@ -818,7 +839,7 @@ googlemaps==2.5.1 gotailwind==0.2.3 # homeassistant.components.govee_ble -govee-ble==0.31.3 +govee-ble==0.40.0 # homeassistant.components.govee_light_local govee-local-api==1.5.1 @@ -827,7 +848,7 @@ govee-local-api==1.5.1 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 @@ -870,7 +891,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 
# homeassistant.components.conversation -hassil==1.7.1 +hassil==1.7.4 # homeassistant.components.jewish_calendar hdate==0.10.9 @@ -895,13 +916,13 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240710.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.3 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect -homeconnect==0.7.2 +homeconnect==0.8.0 # homeassistant.components.homematicip_cloud homematicip==1.1.1 @@ -942,7 +963,7 @@ ifaddr==0.2.0 imgw_pib==1.0.5 # homeassistant.components.incomfort -incomfort-client==0.6.3 +incomfort-client==0.6.3-1 # homeassistant.components.influxdb influxdb-client==1.24.0 @@ -959,18 +980,25 @@ insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire intellifire4py==2.2.2 +# homeassistant.components.iotty +iottycloud==0.1.3 + # homeassistant.components.isal isal==1.6.1 # homeassistant.components.gogogate2 ismartgate==5.0.1 +# homeassistant.components.israel_rail +israel-rail-api==0.1.2 + # homeassistant.components.abode jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 +# homeassistant.components.command_line # homeassistant.components.rest jsonpath==0.82.2 @@ -984,7 +1012,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.8.6.211307 # homeassistant.components.konnected konnected==1.2.0 @@ -996,7 +1024,7 @@ krakenex==2.1.0 lacrosse-view==1.0.1 # homeassistant.components.laundrify -laundrify-aio==1.1.2 +laundrify-aio==1.2.2 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1065,7 +1093,7 @@ melnor-bluetooth==0.0.25 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 @@ -1092,7 +1120,7 @@ moehlenhoff-alpha2==1.3.1 monzopy==1.3.0 # homeassistant.components.mopeka -mopeka-iot-ble==0.7.0 +mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds motionblinds==0.6.23 @@ -1131,13 +1159,13 @@ nessclient==1.0.0 netmap==0.7.0.2 # homeassistant.components.nam -nettigo-air-monitor==3.2.0 +nettigo-air-monitor==3.3.0 # homeassistant.components.nexia nexia==2.0.8 # homeassistant.components.nextcloud -nextcloudmonitor==1.5.0 +nextcloudmonitor==1.5.1 # homeassistant.components.discord nextcord==2.6.0 @@ -1146,7 +1174,7 @@ nextcord==2.6.0 nextdns==3.1.0 # homeassistant.components.nibe_heatpump -nibe==2.8.0 +nibe==2.11.0 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1180,7 +1208,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ollama -ollama-hass==0.1.7 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 @@ -1210,7 +1238,7 @@ openhomedevice==2.2.0 openwebifpy==4.2.5 # homeassistant.components.opower -opower==0.5.2 +opower==0.6.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1307,11 +1335,14 @@ py-dormakaba-dkey==1.0.5 # homeassistant.components.improv_ble py-improv-ble-client==1.0.3 +# homeassistant.components.madvr +py-madvr2==1.6.29 + # homeassistant.components.melissa py-melissa-climate==2.1.4 # homeassistant.components.nextbus -py-nextbusnext==1.0.2 +py-nextbusnext==2.0.3 # homeassistant.components.nightscout py-nightscout==1.2.2 @@ -1322,9 +1353,6 @@ py-sucks==0.9.10 # homeassistant.components.synology_dsm py-synologydsm-api==2.4.4 -# homeassistant.components.seventeentrack -py17track==2021.12.2 - # homeassistant.components.hdmi_cec pyCEC==0.5.2 
@@ -1335,7 +1363,7 @@ pyControl4==1.1.0 pyDuotecno==2024.5.1 # homeassistant.components.electrasmart -pyElectra==1.2.3 +pyElectra==1.2.4 # homeassistant.components.rfxtrx pyRFXtrx==0.31.1 @@ -1383,6 +1411,9 @@ pybalboa==1.0.2 # homeassistant.components.blackbird pyblackbird==0.6 +# homeassistant.components.bluesound +pyblu==0.4.0 + # homeassistant.components.neato pybotvac==0.0.25 @@ -1402,7 +1433,7 @@ pycoolmasternet-async==0.1.5 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.11.1 +pydaikin==2.13.1 # homeassistant.components.deconz pydeconz==116 @@ -1434,8 +1465,11 @@ pyefergy==22.5.0 # homeassistant.components.energenie_power_sockets pyegps==0.2.5 +# homeassistant.components.emoncms +pyemoncms==0.0.7 + # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.everlights pyeverlights==0.1.0 @@ -1486,7 +1520,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 +pyhomeworks==1.1.0 # homeassistant.components.ialarm pyialarm==2.2.0 @@ -1558,7 +1592,7 @@ pylitterbot==2023.5.0 pylutron-caseta==0.20.0 # homeassistant.components.lutron -pylutron==0.2.13 +pylutron==0.2.15 # homeassistant.components.mailgun pymailgunner==1.4 @@ -1572,6 +1606,9 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.1 + # homeassistant.components.mochad pymochad==0.2.0 @@ -1584,6 +1621,9 @@ pymonoprice==0.4 # homeassistant.components.mysensors pymysensors==0.24.0 +# homeassistant.components.iron_os +pynecil==0.2.0 + # homeassistant.components.netgear pynetgear==0.10.10 @@ -1597,7 +1637,7 @@ pynuki==1.6.3 pynws[retry]==1.8.2 # homeassistant.components.nx584 -pynx584==0.5 +pynx584==0.8.2 # homeassistant.components.nzbget pynzbgetapi==0.2.0 @@ -1629,7 +1669,7 @@ pyotgw==2.2.0 pyotp==2.8.0 # homeassistant.components.overkiz -pyoverkiz==1.13.11 +pyoverkiz==1.13.14 # homeassistant.components.onewire pyownet==0.10.0.post1 @@ -1685,17 +1725,15 @@ pyschlage==2024.6.0 # homeassistant.components.sensibo pysensibo==1.0.36 -# homeassistant.components.serial -# homeassistant.components.zha -pyserial-asyncio-fast==0.11 - # homeassistant.components.acer_projector # homeassistant.components.crownstone # homeassistant.components.usb -# homeassistant.components.zha # homeassistant.components.zwave_js pyserial==3.5 +# homeassistant.components.seventeentrack +pyseventeentrack==1.0.0 + # homeassistant.components.sia pysiaalarm==3.1.1 @@ -1736,7 +1774,7 @@ pysqueezebox==0.7.1 pysuez==0.2.0 # homeassistant.components.switchbee -pyswitchbee==1.8.0 +pyswitchbee==1.8.3 # homeassistant.components.tautulli pytautulli==23.1.1 @@ -1763,10 +1801,10 @@ python-fullykiosk==0.0.14 # python-gammu==3.2.4 # homeassistant.components.analytics_insights -python-homeassistant-analytics==0.6.0 +python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.0.0 +python-homewizard-energy==v6.1.1 # homeassistant.components.izone python-izone==1.2.9 @@ -1775,10 +1813,13 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.5 +python-kasa[speedups]==0.7.1 + +# homeassistant.components.linkplay +python-linkplay==0.0.6 # homeassistant.components.matter -python-matter-server==6.2.2 +python-matter-server==6.3.0 # homeassistant.components.xiaomi_miio python-miio==0.5.12 @@ -1818,7 +1859,7 @@ python-songpal==0.16.2 python-tado==0.17.6 # homeassistant.components.technove 
-python-technove==1.2.2 +python-technove==1.3.1 # homeassistant.components.telegram_bot python-telegram-bot[socks]==21.0.1 @@ -1840,10 +1881,10 @@ pytradfri[async]==9.0.1 # homeassistant.components.trafikverket_ferry # homeassistant.components.trafikverket_train # homeassistant.components.trafikverket_weatherstation -pytrafikverket==0.3.10 +pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 @@ -1855,7 +1896,7 @@ pyuptimerobot==22.2.0 pyvera==0.3.13 # homeassistant.components.vesync -pyvesync==2.1.10 +pyvesync==2.1.12 # homeassistant.components.vizio pyvizio==0.1.61 @@ -1912,25 +1953,25 @@ radiotherm==2.1.0 rapt-ble==0.1.2 # homeassistant.components.refoss -refoss-ha==1.2.1 +refoss-ha==1.2.4 # homeassistant.components.rainmachine regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.4 +renault-api==0.2.5 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.5 +reolink-aio==0.9.7 # homeassistant.components.rflink rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.11 +ring-doorbell[listen]==0.8.12 # homeassistant.components.roku rokuecp==0.19.3 @@ -1996,6 +2037,9 @@ sfrbox-api==0.0.8 # homeassistant.components.sharkiq sharkiq==1.0.2 +# homeassistant.components.simplefin +simplefin4py==0.0.18 + # homeassistant.components.sighthound simplehound==0.3 @@ -2087,13 +2131,13 @@ sunweg==3.0.2 surepy==0.9.0 # homeassistant.components.switchbot_cloud -switchbot-api==2.1.0 +switchbot-api==2.2.1 # homeassistant.components.system_bridge -systembridgeconnector==4.0.3 +systembridgeconnector==4.1.0 # homeassistant.components.system_bridge -systembridgemodels==4.0.4 +systembridgemodels==4.1.0 # homeassistant.components.tailscale tailscale==0.6.1 @@ -2107,9 +2151,10 @@ temescal==0.5 # homeassistant.components.temper temperusb==1.6.1 +# homeassistant.components.tesla_fleet # homeassistant.components.teslemetry # homeassistant.components.tessie -tesla-fleet-api==0.6.2 +tesla-fleet-api==0.7.3 # homeassistant.components.powerwall tesla-powerwall==0.5.2 @@ -2118,10 +2163,10 @@ tesla-powerwall==0.5.2 tesla-wall-connector==1.0.2 # homeassistant.components.tessie -tessie-api==0.0.9 +tessie-api==0.1.1 # homeassistant.components.thermobeacon -thermobeacon-ble==0.6.2 +thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 @@ -2151,7 +2196,7 @@ transmission-rpc==7.0.3 ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.0.0 +ttn_client==1.1.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 @@ -2163,13 +2208,13 @@ twentemilieu==2.0.1 twilio==6.32.0 # homeassistant.components.twitch -twitchAPI==4.0.0 +twitchAPI==4.2.1 # homeassistant.components.ukraine_alarm uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==4.2.0 +uiprotect==5.4.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2178,7 +2223,7 @@ ultraheat-api==0.5.7 unifi-discovery==1.2.0 # homeassistant.components.zha -universal-silabs-flasher==0.0.20 +universal-silabs-flasher==0.0.22 # homeassistant.components.upb upb-lib==0.5.8 @@ -2204,7 +2249,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.5 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 @@ -2232,7 +2277,7 @@ vultr==0.1.2 wakeonlan==2.1.0 # homeassistant.components.wallbox -wallbox==0.6.0 +wallbox==0.7.0 # homeassistant.components.folder_watcher 
watchdog==2.3.1 @@ -2243,9 +2288,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 @@ -2256,7 +2298,7 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.18.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 @@ -2271,12 +2313,11 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.0.0 # homeassistant.components.knx xknxproject==3.7.1 -# homeassistant.components.bluesound # homeassistant.components.fritz # homeassistant.components.rest # homeassistant.components.startca @@ -2292,13 +2333,13 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.2 +yalexs==6.4.3 # homeassistant.components.yeelight yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 @@ -2307,7 +2348,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 @@ -2319,22 +2360,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha-quirks==0.0.117 - -# homeassistant.components.zha -zigpy-deconz==0.23.2 - -# homeassistant.components.zha -zigpy-xbee==0.20.1 - -# homeassistant.components.zha -zigpy-zigate==0.12.1 - -# homeassistant.components.zha -zigpy-znp==0.12.2 - -# homeassistant.components.zha -zigpy==0.64.1 +zha==0.0.28 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index a7e5c20d86c..d57a005bb5d 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.4.9 +ruff==0.5.5 yamllint==1.35.1 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 3c593a2bdf7..f887f8113a7 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -208,8 +208,8 @@ dacite>=1.7.0 # Musle wheels for pandas 2.2.0 cannot be build for any architecture. pandas==2.1.4 -# chacha20poly1305-reuseable==0.12.0 is incompatible with cryptography==42.0.x -chacha20poly1305-reuseable>=0.12.1 +# chacha20poly1305-reuseable==0.12.x is incompatible with cryptography==43.0.x +chacha20poly1305-reuseable>=0.13.0 # pycountry<23.12.11 imports setuptools at run time # https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39 diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index bcb19a14c37..ea3c56200a2 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -14,7 +14,6 @@ from . 
import ( codeowners, config_flow, config_schema, - coverage, dependencies, dhcp, docker, @@ -53,7 +52,6 @@ INTEGRATION_PLUGINS = [ config_flow, # This needs to run last, after translations are processed ] HASS_PLUGINS = [ - coverage, docker, mypy_config, metadata, diff --git a/script/hassfest/config_schema.py b/script/hassfest/config_schema.py index 141b087472b..06ef2065127 100644 --- a/script/hassfest/config_schema.py +++ b/script/hassfest/config_schema.py @@ -4,7 +4,7 @@ from __future__ import annotations import ast -from homeassistant.core import DOMAIN as HA_DOMAIN +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from .model import Config, Integration @@ -12,7 +12,7 @@ CONFIG_SCHEMA_IGNORE = { # Configuration under the homeassistant key is a special case, it's handled by # conf_util.async_process_ha_core_config already during bootstrapping, not by # a schema in the homeassistant integration. - HA_DOMAIN, + HOMEASSISTANT_DOMAIN, } @@ -21,7 +21,7 @@ def _has_assignment(module: ast.Module, name: str) -> bool: for item in module.body: if type(item) not in (ast.Assign, ast.AnnAssign, ast.AugAssign): continue - if type(item) == ast.Assign: + if type(item) is ast.Assign: for target in item.targets: if getattr(target, "id", None) == name: return True @@ -35,7 +35,7 @@ def _has_function( module: ast.Module, _type: ast.AsyncFunctionDef | ast.FunctionDef, name: str ) -> bool: """Test if the module defines a function.""" - return any(type(item) == _type and item.name == name for item in module.body) + return any(type(item) is _type and item.name == name for item in module.body) def _has_import(module: ast.Module, name: str) -> bool: diff --git a/script/hassfest/coverage.py b/script/hassfest/coverage.py deleted file mode 100644 index 388f2a1c761..00000000000 --- a/script/hassfest/coverage.py +++ /dev/null @@ -1,181 +0,0 @@ -"""Validate coverage files.""" - -from __future__ import annotations - -from pathlib import Path - -from .model import Config, Integration - -DONT_IGNORE = ( - "config_flow.py", - "device_action.py", - "device_condition.py", - "device_trigger.py", - "diagnostics.py", - "group.py", - "intent.py", - "logbook.py", - "media_source.py", - "recorder.py", - "scene.py", -) -FORCE_COVERAGE = ("gold", "platinum") - -CORE_PREFIX = """# Sorted by hassfest. 
-# -# To sort, run python3 -m script.hassfest -p coverage - -[run] -source = homeassistant -omit = -""" -COMPONENTS_PREFIX = ( - " # omit pieces of code that rely on external devices being present\n" -) -SUFFIX = """[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # TYPE_CHECKING and @overload blocks are never executed during pytest run - if TYPE_CHECKING: - @overload -""" - - -def validate(integrations: dict[str, Integration], config: Config) -> None: - """Validate coverage.""" - coverage_path = config.root / ".coveragerc" - - not_found: list[str] = [] - unsorted: list[str] = [] - checking = False - - previous_line = "" - with coverage_path.open("rt") as fp: - for line in fp: - line = line.strip() - - if line == COMPONENTS_PREFIX.strip(): - previous_line = "" - continue - - if not line or line.startswith("#"): - continue - - if not checking: - if line == "omit =": - checking = True - continue - - # Finished - if line == "[report]": - break - - path = Path(line) - - # Discard wildcard - path_exists = path - while "*" in path_exists.name: - path_exists = path_exists.parent - - if not path_exists.exists(): - not_found.append(line) - continue - - if line < previous_line: - unsorted.append(line) - previous_line = line - - if not line.startswith("homeassistant/components/"): - continue - - # Ignore sub-directories - if len(path.parts) > 4: - continue - - integration_path = path.parent - - integration = integrations[integration_path.name] - - if integration.quality_scale in FORCE_COVERAGE: - integration.add_error( - "coverage", - f"has quality scale {integration.quality_scale} and " - "should not be present in .coveragerc file", - ) - continue - - if (last_part := path.parts[-1]) in {"*", "const.py"} and Path( - f"tests/components/{integration.domain}/__init__.py" - ).exists(): - integration.add_error( - "coverage", - f"has tests and should not use {last_part} in .coveragerc file", - ) - continue - - for check in DONT_IGNORE: - if path.parts[-1] not in {"*", check}: - continue - - if (integration_path / check).exists(): - integration.add_error( - "coverage", - f"{check} must not be ignored by the .coveragerc file", - ) - - if unsorted: - config.add_error( - "coverage", - "Paths are unsorted in .coveragerc file. " - "Run python3 -m script.hassfest\n - " - f"{'\n - '.join(unsorted)}", - fixable=True, - ) - - if not_found: - raise RuntimeError( - f".coveragerc references files that don't exist: {', '.join(not_found)}." 
- ) - - -def generate(integrations: dict[str, Integration], config: Config) -> None: - """Sort coverage.""" - coverage_path = config.root / ".coveragerc" - core = [] - components = [] - section = "header" - - with coverage_path.open("rt") as fp: - for line in fp: - if line == "[report]\n": - break - - if section != "core" and line == "omit =\n": - section = "core" - elif section != "components" and line == COMPONENTS_PREFIX: - section = "components" - elif section == "core" and line != "\n": - core.append(line) - elif section == "components" and line != "\n": - components.append(line) - - assert core, "core should be a non-empty list" - assert components, "components should be a non-empty list" - content = ( - f"{CORE_PREFIX}{"".join(sorted(core))}\n" - f"{COMPONENTS_PREFIX}{"".join(sorted(components))}\n" - f"\n{SUFFIX}" - ) - - with coverage_path.open("w") as fp: - fp.write(content) diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 8ff0750250f..1c01ee7cf58 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -120,10 +120,6 @@ NO_DIAGNOSTICS = [ "gdacs", "geonetnz_quakes", "hyperion", - # Modbus is excluded because it doesn't have to have a config flow - # according to ADR-0010, since it's a protocol integration. This - # means that it can't implement diagnostics. - "modbus", "nightscout", "pvpc_hourly_pricing", "risco", diff --git a/script/hassfest/mypy_config.py b/script/hassfest/mypy_config.py index 56734257f78..d2aff81aa05 100644 --- a/script/hassfest/mypy_config.py +++ b/script/hassfest/mypy_config.py @@ -36,7 +36,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = { "plugins": "pydantic.mypy", "show_error_codes": "true", "follow_imports": "normal", - "enable_incomplete_feature": ",".join( # noqa: FLY002 + "enable_incomplete_feature": ", ".join( # noqa: FLY002 [ "NewGenericSyntax", ] diff --git a/script/install_integration_requirements.py b/script/install_integration_requirements.py index ab91ea71557..91c9f6a8ed0 100644 --- a/script/install_integration_requirements.py +++ b/script/install_integration_requirements.py @@ -45,6 +45,7 @@ def main() -> int | None: cmd, check=True, ) + return None if __name__ == "__main__": diff --git a/script/licenses.py b/script/licenses.py new file mode 100644 index 00000000000..dc89cdad9a9 --- /dev/null +++ b/script/licenses.py @@ -0,0 +1,251 @@ +"""Tool to check the licenses.""" + +from __future__ import annotations + +from dataclasses import dataclass +import json +from pathlib import Path +import sys + +from awesomeversion import AwesomeVersion + + +@dataclass +class PackageDefinition: + """Package definition.""" + + license: str + name: str + version: AwesomeVersion + + @classmethod + def from_dict(cls, data: dict[str, str]) -> PackageDefinition: + """Create a package definition from a dictionary.""" + return cls( + license=data["License"], + name=data["Name"], + version=AwesomeVersion(data["Version"]), + ) + + +OSI_APPROVED_LICENSES = { + "Academic Free License (AFL)", + "Apache Software License", + "Apple Public Source License", + "Artistic License", + "Attribution Assurance License", + "BSD License", + "Boost Software License 1.0 (BSL-1.0)", + "CEA CNRS Inria Logiciel Libre License, version 2.1 (CeCILL-2.1)", + "Common Development and Distribution License 1.0 (CDDL-1.0)", + "Common Public License", + "Eclipse Public License 1.0 (EPL-1.0)", + "Eclipse Public License 2.0 (EPL-2.0)", + "Educational Community License, Version 2.0 (ECL-2.0)", + "Eiffel Forum License", + "European Union Public Licence 1.0 
(EUPL 1.0)", + "European Union Public Licence 1.1 (EUPL 1.1)", + "European Union Public Licence 1.2 (EUPL 1.2)", + "GNU Affero General Public License v3", + "GNU Affero General Public License v3 or later (AGPLv3+)", + "GNU Free Documentation License (FDL)", + "GNU General Public License (GPL)", + "GNU General Public License v2 (GPLv2)", + "GNU General Public License v2 or later (GPLv2+)", + "GNU General Public License v3 (GPLv3)", + "GNU General Public License v3 or later (GPLv3+)", + "GNU Lesser General Public License v2 (LGPLv2)", + "GNU Lesser General Public License v2 or later (LGPLv2+)", + "GNU Lesser General Public License v3 (LGPLv3)", + "GNU Lesser General Public License v3 or later (LGPLv3+)", + "GNU Library or Lesser General Public License (LGPL)", + "Historical Permission Notice and Disclaimer (HPND)", + "IBM Public License", + "ISC License (ISCL)", + "Intel Open Source License", + "Jabber Open Source License", + "MIT License", + "MIT No Attribution License (MIT-0)", + "MITRE Collaborative Virtual Workspace License (CVW)", + "MirOS License (MirOS)", + "Motosoto License", + "Mozilla Public License 1.0 (MPL)", + "Mozilla Public License 1.1 (MPL 1.1)", + "Mozilla Public License 2.0 (MPL 2.0)", + "Mulan Permissive Software License v2 (MulanPSL-2.0)", + "NASA Open Source Agreement v1.3 (NASA-1.3)", + "Nethack General Public License", + "Nokia Open Source License", + "Open Group Test Suite License", + "Open Software License 3.0 (OSL-3.0)", + "PostgreSQL License", + "Python License (CNRI Python License)", + "Python Software Foundation License", + "Qt Public License (QPL)", + "Ricoh Source Code Public License", + "SIL Open Font License 1.1 (OFL-1.1)", + "Sleepycat License", + "Sun Industry Standards Source License (SISSL)", + "Sun Public License", + "The Unlicense (Unlicense)", + "Universal Permissive License (UPL)", + "University of Illinois/NCSA Open Source License", + "Vovida Software License 1.0", + "W3C License", + "X.Net License", + "Zero-Clause BSD (0BSD)", + "Zope Public License", + "zlib/libpng License", + "Apache License", + "MIT", + "apache-2.0", + "GPL-3.0", + "GPLv3+", + "MPL2", + "MPL-2.0", + "Apache 2", + "LGPL v3", + "BSD", + "GNU-3.0", + "GPLv3", + "Eclipse Public License v2.0", + "ISC", + "GPL-2.0-only", + "mit", + "GNU General Public License v3", + "Unlicense", + "Apache-2", + "GPLv2", +} + +EXCEPTIONS = { + "PyMicroBot", # https://github.com/spycle/pyMicroBot/pull/3 + "PySwitchmate", # https://github.com/Danielhiversen/pySwitchmate/pull/16 + "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 + "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 + "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 + "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 + "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 + "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 + "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 + "airthings-ble", # https://github.com/Airthings/airthings-ble/pull/42 + "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 + "asyncio", # PSF License + "chacha20poly1305", # LGPL + "chacha20poly1305-reuseable", # Apache 2.0 or BSD 3-Clause + "commentjson", # https://github.com/vaidik/commentjson/pull/55 + "crownstone-cloud", # https://github.com/crownstone/crownstone-lib-python-cloud/pull/5 + "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 + 
"crownstone-sse", # https://github.com/crownstone/crownstone-lib-python-sse/pull/2 + "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 + "dio-chacon-wifi-api", + "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 + "enocean", # https://github.com/kipe/enocean/pull/142 + "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 + "heatmiserV3", # https://github.com/andylockran/heatmiserV3/pull/94 + "huum", # https://github.com/frwickst/pyhuum/pull/8 + "imutils", # https://github.com/PyImageSearch/imutils/pull/292 + "iso4217", # Public domain + "kiwiki_client", # https://github.com/c7h/kiwiki_client/pull/6 + "krakenex", # https://github.com/veox/python3-krakenex/pull/145 + "ld2410-ble", # https://github.com/930913/ld2410-ble/pull/7 + "maxcube-api", # https://github.com/uebelack/python-maxcube-api/pull/48 + "nessclient", # https://github.com/nickw444/nessclient/pull/65 + "neurio", # https://github.com/jordanh/neurio-python/pull/13 + "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 + "pigpio", # https://github.com/joan2937/pigpio/pull/608 + "pyEmby", # https://github.com/mezz64/pyEmby/pull/12 + "pymitv", # MIT + "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 + "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 + "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 + "pylutron-caseta", # https://github.com/gurumitts/pylutron-caseta/pull/168 + "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 + "pyvera", # https://github.com/maximvelichko/pyvera/pull/164 + "pyxeoma", # https://github.com/jeradM/pyxeoma/pull/11 + "repoze.lru", + "russound", # https://github.com/laf/russound/pull/14 # codespell:ignore laf + "ruuvitag-ble", # https://github.com/Bluetooth-Devices/ruuvitag-ble/pull/10 + "sensirion-ble", # https://github.com/akx/sensirion-ble/pull/9 + "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 + "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 + "tellduslive", # https://github.com/molobrakos/tellduslive/pull/24 + "tellsticknet", # https://github.com/molobrakos/tellsticknet/pull/33 + "vincenty", # Public domain + "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 +} + +TODO = { + "aiocache": AwesomeVersion( + "0.12.2" + ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
+ "asterisk_mbox": AwesomeVersion( + "0.5.0" + ), # No license, integration is deprecated and scheduled for removal in 2024.9.0 + "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) + "uvcclient": AwesomeVersion( + "0.11.0" + ), # No License https://github.com/kk7ds/uvcclient/issues/7 +} + + +def main() -> int: + """Run the main script.""" + raw_licenses = json.loads(Path("licenses.json").read_text()) + package_definitions = [PackageDefinition.from_dict(data) for data in raw_licenses] + exit_code = 0 + for package in package_definitions: + previous_unapproved_version = TODO.get(package.name) + approved = False + for approved_license in OSI_APPROVED_LICENSES: + if approved_license in package.license: + approved = True + break + if previous_unapproved_version is not None: + if previous_unapproved_version < package.version: + if approved: + print( + "Approved license detected for" + f"{package.name}@{package.version}: {package.license}" + ) + print("Please remove the package from the TODO list.") + print() + else: + print( + "We could not detect an OSI-approved license for " + f"{package.name}@{package.version}: {package.license}" + ) + print() + exit_code = 1 + elif not approved and package.name not in EXCEPTIONS: + print( + "We could not detect an OSI-approved license for" + f"{package.name}@{package.version}: {package.license}" + ) + print() + exit_code = 1 + elif approved and package.name in EXCEPTIONS: + print( + "Approved license detected for" + f"{package.name}@{package.version}: {package.license}" + ) + print(f"Please remove the package from the EXCEPTIONS list: {package.name}") + print() + exit_code = 1 + current_packages = {package.name for package in package_definitions} + for package in [*TODO.keys(), *EXCEPTIONS]: + if package not in current_packages: + print( + f"Package {package} is tracked, but not used. Please remove from the licenses.py" + "file." 
+ ) + print() + exit_code = 1 + return exit_code + + +if __name__ == "__main__": + exit_code = main() + if exit_code == 0: + print("All licenses are approved!") + sys.exit(exit_code) diff --git a/script/scaffold/templates/config_flow/tests/conftest.py b/script/scaffold/templates/config_flow/tests/conftest.py index fc217636705..12faacd40df 100644 --- a/script/scaffold/templates/config_flow/tests/conftest.py +++ b/script/scaffold/templates/config_flow/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/script/scaffold/templates/config_flow_helper/tests/conftest.py b/script/scaffold/templates/config_flow_helper/tests/conftest.py index fc217636705..12faacd40df 100644 --- a/script/scaffold/templates/config_flow_helper/tests/conftest.py +++ b/script/scaffold/templates/config_flow_helper/tests/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the NEW_NAME tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/auth/mfa_modules/test_insecure_example.py b/tests/auth/mfa_modules/test_insecure_example.py index f7f8a327059..8caca780ecb 100644 --- a/tests/auth/mfa_modules/test_insecure_example.py +++ b/tests/auth/mfa_modules/test_insecure_example.py @@ -121,7 +121,7 @@ async def test_login(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("pin") == str + assert result["data_schema"].schema.get("pin") is str result = await hass.auth.login_flow.async_configure( result["flow_id"], {"pin": "invalid-code"} diff --git a/tests/auth/mfa_modules/test_notify.py b/tests/auth/mfa_modules/test_notify.py index 23b8811dbf9..d6f4d80f99e 100644 --- a/tests/auth/mfa_modules/test_notify.py +++ b/tests/auth/mfa_modules/test_notify.py @@ -155,7 +155,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str # wait service call finished await hass.async_block_till_done() @@ -214,7 +214,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str # wait service call finished await hass.async_block_till_done() diff --git a/tests/auth/mfa_modules/test_totp.py b/tests/auth/mfa_modules/test_totp.py index 961db3f44ca..fadc3214712 100644 --- a/tests/auth/mfa_modules/test_totp.py +++ b/tests/auth/mfa_modules/test_totp.py @@ -114,7 +114,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: ) assert result["type"] == data_entry_flow.FlowResultType.FORM assert result["step_id"] == "mfa" - assert result["data_schema"].schema.get("code") == str + assert result["data_schema"].schema.get("code") is str with patch("pyotp.TOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( diff --git a/tests/auth/providers/test_command_line.py b/tests/auth/providers/test_command_line.py index 016ce767bad..2ce49730e5f 100644 
--- a/tests/auth/providers/test_command_line.py +++ b/tests/auth/providers/test_command_line.py @@ -10,10 +10,11 @@ from homeassistant import data_entry_flow from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import command_line from homeassistant.const import CONF_TYPE +from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -21,7 +22,9 @@ async def store(hass): @pytest.fixture -def provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> command_line.CommandLineAuthProvider: """Mock provider.""" return command_line.CommandLineAuthProvider( hass, @@ -38,12 +41,18 @@ def provider(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: command_line.CommandLineAuthProvider, +) -> AuthManager: """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential(manager, provider) -> None: +async def test_create_new_credential( + manager: AuthManager, provider: command_line.CommandLineAuthProvider +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "good-user", "password": "good-pass"} @@ -57,7 +66,9 @@ async def test_create_new_credential(manager, provider) -> None: assert not user.local_only -async def test_match_existing_credentials(store, provider) -> None: +async def test_match_existing_credentials( + provider: command_line.CommandLineAuthProvider, +) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -73,24 +84,26 @@ async def test_match_existing_credentials(store, provider) -> None: assert credentials is existing -async def test_invalid_username(provider) -> None: +async def test_invalid_username(provider: command_line.CommandLineAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("bad-user", "good-pass") -async def test_invalid_password(provider) -> None: +async def test_invalid_password(provider: command_line.CommandLineAuthProvider) -> None: """Test we raise if incorrect password specified.""" with pytest.raises(command_line.InvalidAuthError): await provider.async_validate_login("good-user", "bad-pass") -async def test_good_auth(provider) -> None: +async def test_good_auth(provider: command_line.CommandLineAuthProvider) -> None: """Test nothing is raised with good credentials.""" await provider.async_validate_login("good-user", "good-pass") -async def test_good_auth_with_meta(manager, provider) -> None: +async def test_good_auth_with_meta( + manager: AuthManager, provider: command_line.CommandLineAuthProvider +) -> None: """Test metadata is added upon successful authentication.""" provider.config[command_line.CONF_ARGS] = ["--with-meta"] provider.config[command_line.CONF_META] = True @@ -110,7 +123,9 @@ async def test_good_auth_with_meta(manager, provider) -> None: assert user.local_only -async def test_utf_8_username_password(provider) -> None: +async def test_utf_8_username_password( + provider: command_line.CommandLineAuthProvider, +) -> None: """Test that we create a new credential.""" credentials = await 
provider.async_get_or_create_credentials( {"username": "ßßß", "password": "äöü"} @@ -118,7 +133,9 @@ async def test_utf_8_username_password(provider) -> None: assert credentials.is_new is True -async def test_login_flow_validates(provider) -> None: +async def test_login_flow_validates( + provider: command_line.CommandLineAuthProvider, +) -> None: """Test login flow.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init() @@ -137,7 +154,7 @@ async def test_login_flow_validates(provider) -> None: assert result["data"]["username"] == "good-user" -async def test_strip_username(provider) -> None: +async def test_strip_username(provider: command_line.CommandLineAuthProvider) -> None: """Test authentication works with username with whitespace around.""" flow = await provider.async_login_flow({}) result = await flow.async_step_init( diff --git a/tests/auth/providers/test_insecure_example.py b/tests/auth/providers/test_insecure_example.py index f0043231c04..7c28028753c 100644 --- a/tests/auth/providers/test_insecure_example.py +++ b/tests/auth/providers/test_insecure_example.py @@ -7,10 +7,11 @@ import pytest from homeassistant.auth import AuthManager, auth_store, models as auth_models from homeassistant.auth.providers import insecure_example +from homeassistant.core import HomeAssistant @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -18,7 +19,9 @@ async def store(hass): @pytest.fixture -def provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> insecure_example.ExampleAuthProvider: """Mock provider.""" return insecure_example.ExampleAuthProvider( hass, @@ -38,12 +41,18 @@ def provider(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: insecure_example.ExampleAuthProvider, +) -> AuthManager: """Mock manager.""" return AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) -async def test_create_new_credential(manager, provider) -> None: +async def test_create_new_credential( + manager: AuthManager, provider: insecure_example.ExampleAuthProvider +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "user-test", "password": "password-test"} @@ -55,7 +64,9 @@ async def test_create_new_credential(manager, provider) -> None: assert user.is_active -async def test_match_existing_credentials(store, provider) -> None: +async def test_match_existing_credentials( + provider: insecure_example.ExampleAuthProvider, +) -> None: """See if we match existing users.""" existing = auth_models.Credentials( id=uuid.uuid4(), @@ -71,19 +82,21 @@ async def test_match_existing_credentials(store, provider) -> None: assert credentials is existing -async def test_verify_username(provider) -> None: +async def test_verify_username(provider: insecure_example.ExampleAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await provider.async_validate_login("non-existing-user", "password-test") -async def test_verify_password(provider) -> None: +async def test_verify_password(provider: insecure_example.ExampleAuthProvider) -> None: """Test we raise if incorrect user specified.""" with pytest.raises(insecure_example.InvalidAuthError): await 
provider.async_validate_login("user-test", "incorrect-password") -async def test_utf_8_username_password(provider) -> None: +async def test_utf_8_username_password( + provider: insecure_example.ExampleAuthProvider, +) -> None: """Test that we create a new credential.""" credentials = await provider.async_get_or_create_credentials( {"username": "🎉", "password": "😎"} diff --git a/tests/auth/providers/test_trusted_networks.py b/tests/auth/providers/test_trusted_networks.py index 2f84a256f2d..e738e8f0911 100644 --- a/tests/auth/providers/test_trusted_networks.py +++ b/tests/auth/providers/test_trusted_networks.py @@ -17,7 +17,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture -async def store(hass): +async def store(hass: HomeAssistant) -> auth_store.AuthStore: """Mock store.""" store = auth_store.AuthStore(hass) await store.async_load() @@ -25,7 +25,9 @@ async def store(hass): @pytest.fixture -def provider(hass, store): +def provider( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -45,7 +47,9 @@ def provider(hass, store): @pytest.fixture -def provider_with_user(hass, store): +def provider_with_user( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider with trusted users config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -71,7 +75,9 @@ def provider_with_user(hass, store): @pytest.fixture -def provider_bypass_login(hass, store): +def provider_bypass_login( + hass: HomeAssistant, store: auth_store.AuthStore +) -> tn_auth.TrustedNetworksAuthProvider: """Mock provider with allow_bypass_login config.""" return tn_auth.TrustedNetworksAuthProvider( hass, @@ -92,13 +98,21 @@ def provider_bypass_login(hass, store): @pytest.fixture -def manager(hass, store, provider): +def manager( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager.""" return auth.AuthManager(hass, store, {(provider.type, provider.id): provider}, {}) @pytest.fixture -def manager_with_user(hass, store, provider_with_user): +def manager_with_user( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager with trusted user.""" return auth.AuthManager( hass, @@ -109,7 +123,11 @@ def manager_with_user(hass, store, provider_with_user): @pytest.fixture -def manager_bypass_login(hass, store, provider_bypass_login): +def manager_bypass_login( + hass: HomeAssistant, + store: auth_store.AuthStore, + provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, +) -> auth.AuthManager: """Mock manager with allow bypass login.""" return auth.AuthManager( hass, @@ -119,7 +137,7 @@ def manager_bypass_login(hass, store, provider_bypass_login): ) -async def test_config_schema(): +async def test_config_schema() -> None: """Test CONFIG_SCHEMA.""" # Valid configuration tn_auth.CONFIG_SCHEMA( @@ -145,7 +163,9 @@ async def test_config_schema(): ) -async def test_trusted_networks_credentials(manager, provider) -> None: +async def test_trusted_networks_credentials( + manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test trusted_networks credentials related functions.""" owner = await manager.async_create_user("test-owner") tn_owner_cred = await provider.async_get_or_create_credentials({"user": owner.id}) @@ -162,22 +182,24 @@ 
async def test_trusted_networks_credentials(manager, provider) -> None: await provider.async_get_or_create_credentials({"user": "invalid-user"}) -async def test_validate_access(provider) -> None: +async def test_validate_access(provider: tn_auth.TrustedNetworksAuthProvider) -> None: """Test validate access from trusted networks.""" provider.async_validate_access(ip_address("192.168.0.1")) provider.async_validate_access(ip_address("192.168.128.10")) provider.async_validate_access(ip_address("::1")) provider.async_validate_access(ip_address("fd01:db8::ff00:42:8329")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.0.2")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("127.0.0.1")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("2001:db8::ff00:42:8329")) -async def test_validate_access_proxy(hass: HomeAssistant, provider) -> None: +async def test_validate_access_proxy( + hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test validate access from trusted networks are blocked from proxy.""" await async_setup_component( @@ -192,15 +214,17 @@ async def test_validate_access_proxy(hass: HomeAssistant, provider) -> None: ) provider.async_validate_access(ip_address("192.168.128.2")) provider.async_validate_access(ip_address("fd00::2")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.0")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.1")) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("fd00::1")) -async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None: +async def test_validate_access_cloud( + hass: HomeAssistant, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test validate access from trusted networks are blocked from cloud.""" await async_setup_component( hass, @@ -217,21 +241,25 @@ async def test_validate_access_cloud(hass: HomeAssistant, provider) -> None: provider.async_validate_access(ip_address("192.168.128.2")) remote.is_cloud_request.set(True) - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_access(ip_address("192.168.128.2")) -async def test_validate_refresh_token(provider) -> None: +async def test_validate_refresh_token( + provider: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Verify re-validation of refresh token.""" with patch.object(provider, "async_validate_access") as mock: - with pytest.raises(tn_auth.InvalidAuthError): + with pytest.raises(auth.InvalidAuthError): provider.async_validate_refresh_token(Mock(), None) provider.async_validate_refresh_token(Mock(), "127.0.0.1") mock.assert_called_once_with(ip_address("127.0.0.1")) -async def test_login_flow(manager, provider) -> None: +async def test_login_flow( + manager: auth.AuthManager, provider: tn_auth.TrustedNetworksAuthProvider +) -> None: """Test login flow.""" owner = await manager.async_create_user("test-owner") user = await manager.async_create_user("test-user") @@ -258,7 +286,10 @@ async def test_login_flow(manager, provider) -> None: 
assert step["data"]["user"] == user.id -async def test_trusted_users_login(manager_with_user, provider_with_user) -> None: +async def test_trusted_users_login( + manager_with_user: auth.AuthManager, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test available user list changed per different IP.""" owner = await manager_with_user.async_create_user("test-owner") sys_user = await manager_with_user.async_create_system_user( @@ -338,7 +369,10 @@ async def test_trusted_users_login(manager_with_user, provider_with_user) -> Non assert schema({"user": sys_user.id}) -async def test_trusted_group_login(manager_with_user, provider_with_user) -> None: +async def test_trusted_group_login( + manager_with_user: auth.AuthManager, + provider_with_user: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test config trusted_user with group_id.""" owner = await manager_with_user.async_create_user("test-owner") # create a user in user group @@ -391,7 +425,10 @@ async def test_trusted_group_login(manager_with_user, provider_with_user) -> Non assert schema({"user": user.id}) -async def test_bypass_login_flow(manager_bypass_login, provider_bypass_login) -> None: +async def test_bypass_login_flow( + manager_bypass_login: auth.AuthManager, + provider_bypass_login: tn_auth.TrustedNetworksAuthProvider, +) -> None: """Test login flow can be bypass if only one user available.""" owner = await manager_bypass_login.async_create_user("test-owner") diff --git a/tests/common.py b/tests/common.py index f5531dbf40d..64e11ee7b51 100644 --- a/tests/common.py +++ b/tests/common.py @@ -3,8 +3,17 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine, Mapping, Sequence -from contextlib import asynccontextmanager, contextmanager +from collections.abc import ( + AsyncGenerator, + Callable, + Coroutine, + Generator, + Iterable, + Iterator, + Mapping, + Sequence, +) +from contextlib import asynccontextmanager, contextmanager, suppress from datetime import UTC, datetime, timedelta from enum import Enum import functools as ft @@ -23,7 +32,7 @@ from unittest.mock import AsyncMock, Mock, patch from aiohttp.test_utils import unused_port as get_test_instance_port # noqa: F401 import pytest from syrupy import SnapshotAssertion -from typing_extensions import AsyncGenerator, Generator +from typing_extensions import TypeVar import voluptuous as vol from homeassistant import auth, bootstrap, config_entries, loader @@ -82,8 +91,12 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import JSONEncoder, _orjson_default_encoder, json_dumps from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.async_ import run_callback_threadsafe +from homeassistant.util.async_ import ( + _SHUTDOWN_RUN_CALLBACK_THREADSAFE, + run_callback_threadsafe, +) import homeassistant.util.dt as dt_util +from homeassistant.util.event_type import EventType from homeassistant.util.json import ( JsonArrayType, JsonObjectType, @@ -101,6 +114,8 @@ from .testing_config.custom_components.test_constant_deprecation import ( import_deprecated_constant, ) +_DataT = TypeVar("_DataT", bound=Mapping[str, Any], default=dict[str, Any]) + _LOGGER = logging.getLogger(__name__) INSTANCES = [] CLIENT_ID = "https://example.com/app" @@ -364,6 +379,9 @@ async def async_test_home_assistant( finally: # Restore timezone, it is set when creating the hass object 
dt_util.set_default_time_zone(orig_tz) + # Remove loop shutdown indicator to not interfere with additional hass objects + with suppress(AttributeError): + delattr(hass.loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE) def async_mock_service( @@ -379,7 +397,7 @@ def async_mock_service( calls = [] @callback - def mock_service_log(call): # pylint: disable=unnecessary-lambda + def mock_service_log(call): """Mock service call.""" calls.append(call) if raise_exception is not None: @@ -1428,7 +1446,7 @@ async def get_system_health_info(hass: HomeAssistant, domain: str) -> dict[str, @contextmanager -def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> None: +def mock_config_flow(domain: str, config_flow: type[ConfigFlow]) -> Iterator[None]: """Mock a config flow handler.""" original_handler = config_entries.HANDLERS.get(domain) config_entries.HANDLERS[domain] = config_flow @@ -1496,12 +1514,14 @@ def mock_platform( module_cache[platform_path] = module or Mock() -def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]: +def async_capture_events( + hass: HomeAssistant, event_name: EventType[_DataT] | str +) -> list[Event[_DataT]]: """Create a helper that captures events.""" - events = [] + events: list[Event[_DataT]] = [] @callback - def capture_events(event: Event) -> None: + def capture_events(event: Event[_DataT]) -> None: events.append(event) hass.bus.async_listen(event_name, capture_events) @@ -1510,14 +1530,14 @@ def async_capture_events(hass: HomeAssistant, event_name: str) -> list[Event]: @callback -def async_mock_signal( - hass: HomeAssistant, signal: SignalType[Any] | str -) -> list[tuple[Any]]: +def async_mock_signal[*_Ts]( + hass: HomeAssistant, signal: SignalType[*_Ts] | str +) -> list[tuple[*_Ts]]: """Catch all dispatches to a signal.""" - calls = [] + calls: list[tuple[*_Ts]] = [] @callback - def mock_signal_handler(*args: Any) -> None: + def mock_signal_handler(*args: *_Ts) -> None: """Mock service call.""" calls.append(args) @@ -1717,7 +1737,7 @@ def extract_stack_to_frame(extract_stack: list[Mock]) -> FrameType: def setup_test_component_platform( hass: HomeAssistant, domain: str, - entities: Sequence[Entity], + entities: Iterable[Entity], from_config_entry: bool = False, built_in: bool = True, ) -> MockPlatform: diff --git a/tests/components/abode/conftest.py b/tests/components/abode/conftest.py index 21b236540d0..097eb568d4a 100644 --- a/tests/components/abode/conftest.py +++ b/tests/components/abode/conftest.py @@ -1,11 +1,11 @@ """Configuration for Abode tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from jaraco.abode.helpers import urls as URL import pytest from requests_mock import Mocker -from typing_extensions import Generator from tests.common import load_fixture from tests.components.light.conftest import mock_light_profiles # noqa: F401 diff --git a/tests/components/accuweather/conftest.py b/tests/components/accuweather/conftest.py index 3b0006068ea..737fd3f84b6 100644 --- a/tests/components/accuweather/conftest.py +++ b/tests/components/accuweather/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AccuWeather tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.accuweather.const import DOMAIN diff --git a/tests/components/aemet/conftest.py b/tests/components/aemet/conftest.py index aa4f537c7fb..38f4793541c 100644 --- a/tests/components/aemet/conftest.py +++ 
b/tests/components/aemet/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for aemet.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/aemet/test_diagnostics.py b/tests/components/aemet/test_diagnostics.py index 0d94995a85b..6d007dd0465 100644 --- a/tests/components/aemet/test_diagnostics.py +++ b/tests/components/aemet/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.aemet.const import DOMAIN from homeassistant.core import HomeAssistant @@ -30,4 +31,4 @@ async def test_config_entry_diagnostics( return_value={}, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/aftership/conftest.py b/tests/components/aftership/conftest.py index 1704b099cc2..d66ae267bfe 100644 --- a/tests/components/aftership/conftest.py +++ b/tests/components/aftership/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the AfterShip tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/agent_dvr/conftest.py b/tests/components/agent_dvr/conftest.py index a62e1738850..0ce1c008a23 100644 --- a/tests/components/agent_dvr/conftest.py +++ b/tests/components/agent_dvr/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Agent DVR.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index 7ca1198ce5f..a6ee85ecbdd 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -1,10 +1,10 @@ """AirGradient tests configuration.""" +from collections.abc import Generator from unittest.mock import patch from airgradient import Config, Measures import pytest -from typing_extensions import Generator from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/airgradient/snapshots/test_init.ambr b/tests/components/airgradient/snapshots/test_init.ambr index 4462a996a49..e47c5b38bbc 100644 --- a/tests/components/airgradient/snapshots/test_init.ambr +++ b/tests/components/airgradient/snapshots/test_init.ambr @@ -20,7 +20,8 @@ 'labels': set({ }), 'manufacturer': 'AirGradient', - 'model': 'I-9PSL', + 'model': 'AirGradient ONE', + 'model_id': 'I-9PSL', 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , @@ -51,7 +52,8 @@ 'labels': set({ }), 'manufacturer': 'AirGradient', - 'model': 'O-1PPT', + 'model': 'AirGradient Open Air', + 'model_id': 'O-1PPT', 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 217d2ac0e8c..222ac5d04af 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -3,8 +3,11 @@ from ipaddress import ip_address from unittest.mock import AsyncMock -from airgradient import AirGradientConnectionError, ConfigurationControl -from mashumaro import 
MissingField +from airgradient import ( + AirGradientConnectionError, + AirGradientParseError, + ConfigurationControl, +) from homeassistant.components.airgradient import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -141,9 +144,7 @@ async def test_flow_old_firmware_version( mock_setup_entry: AsyncMock, ) -> None: """Test flow with old firmware version.""" - mock_airgradient_client.get_current_measures.side_effect = MissingField( - "", object, object - ) + mock_airgradient_client.get_current_measures.side_effect = AirGradientParseError result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/airly/test_diagnostics.py b/tests/components/airly/test_diagnostics.py index 7364824e594..9a61bf5abee 100644 --- a/tests/components/airly/test_diagnostics.py +++ b/tests/components/airly/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Airly diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -22,4 +23,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airnow/conftest.py b/tests/components/airnow/conftest.py index 676595250f1..c5d23fa7289 100644 --- a/tests/components/airnow/conftest.py +++ b/tests/components/airnow/conftest.py @@ -1,10 +1,10 @@ """Define fixtures for AirNow tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.airnow import DOMAIN from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS diff --git a/tests/components/airnow/test_diagnostics.py b/tests/components/airnow/test_diagnostics.py index 7329398e789..eb79dabe51a 100644 --- a/tests/components/airnow/test_diagnostics.py +++ b/tests/components/airnow/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -27,7 +28,6 @@ async def test_entry_diagnostics( return_value="PST", ): assert await hass.config_entries.async_setup(config_entry.entry_id) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airq/conftest.py b/tests/components/airq/conftest.py index 5df032c0308..a132153a76f 100644 --- a/tests/components/airq/conftest.py +++ b/tests/components/airq/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for air-Q.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/airtouch5/__init__.py b/tests/components/airtouch5/__init__.py index 2b76786e7e5..567be6af774 100644 --- a/tests/components/airtouch5/__init__.py +++ b/tests/components/airtouch5/__init__.py @@ -1 +1,13 @@ """Tests for the Airtouch 5 integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + 
config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/airtouch5/conftest.py b/tests/components/airtouch5/conftest.py index d6d55689f17..fab26e3f6cc 100644 --- a/tests/components/airtouch5/conftest.py +++ b/tests/components/airtouch5/conftest.py @@ -1,9 +1,23 @@ """Common fixtures for the Airtouch 5 tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch +from airtouch5py.data_packet_factory import DataPacketFactory +from airtouch5py.packets.ac_ability import AcAbility +from airtouch5py.packets.ac_status import AcFanSpeed, AcMode, AcPowerState, AcStatus +from airtouch5py.packets.zone_name import ZoneName +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) import pytest -from typing_extensions import Generator + +from homeassistant.components.airtouch5.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry @pytest.fixture @@ -13,3 +27,107 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.airtouch5.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="1.1.1.1", + data={ + CONF_HOST: "1.1.1.1", + }, + ) + + +@pytest.fixture +def mock_airtouch5_client() -> Generator[AsyncMock]: + """Mock an Airtouch5 client.""" + + with ( + patch( + "homeassistant.components.airtouch5.Airtouch5SimpleClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.airtouch5.config_flow.Airtouch5SimpleClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.data_packet_factory = DataPacketFactory() + client.ac = [ + AcAbility( + ac_number=1, + ac_name="AC 1", + start_zone_number=1, + zone_count=2, + supports_mode_cool=True, + supports_mode_fan=True, + supports_mode_dry=True, + supports_mode_heat=True, + supports_mode_auto=True, + supports_fan_speed_intelligent_auto=True, + supports_fan_speed_turbo=True, + supports_fan_speed_powerful=True, + supports_fan_speed_high=True, + supports_fan_speed_medium=True, + supports_fan_speed_low=True, + supports_fan_speed_quiet=True, + supports_fan_speed_auto=True, + min_cool_set_point=15, + max_cool_set_point=25, + min_heat_set_point=20, + max_heat_set_point=30, + ) + ] + client.latest_ac_status = { + 1: AcStatus( + ac_power_state=AcPowerState.ON, + ac_number=1, + ac_mode=AcMode.AUTO, + ac_fan_speed=AcFanSpeed.AUTO, + ac_setpoint=24, + turbo_active=False, + bypass_active=False, + spill_active=False, + timer_set=False, + temperature=24, + error_code=0, + ) + } + + client.zones = [ZoneName(1, "Zone 1"), ZoneName(2, "Zone 2")] + client.latest_zone_status = { + 1: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=0.9, + set_point=24, + has_sensor=False, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + 2: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.TEMPERATURE_CONTROL, + open_percentage=1, + set_point=24, + has_sensor=True, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + } + + client.connection_state_callbacks = [] + client.zone_status_callbacks = [] 
+ client.ac_status_callbacks = [] + + yield client diff --git a/tests/components/airtouch5/snapshots/test_cover.ambr b/tests/components/airtouch5/snapshots/test_cover.ambr new file mode 100644 index 00000000000..a8e57f69527 --- /dev/null +++ b/tests/components/airtouch5/snapshots/test_cover.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_all_entities[cover.zone_1_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_1_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_1_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.zone_1_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 90, + 'device_class': 'damper', + 'friendly_name': 'Zone 1 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_1_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_all_entities[cover.zone_2_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_2_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_2_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.zone_2_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'device_class': 'damper', + 'friendly_name': 'Zone 2 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_2_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py new file mode 100644 index 00000000000..295535cd95d --- /dev/null +++ b/tests/components/airtouch5/test_cover.py @@ -0,0 +1,143 @@ +"""Tests for the Airtouch5 cover platform.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock, patch + +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + STATE_OPEN, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_CLOSED, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +COVER_ENTITY_ID = "cover.zone_1_damper" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch("homeassistant.components.airtouch5.PLATFORMS", [Platform.COVER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_cover_actions( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the actions of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: COVER_ENTITY_ID, ATTR_POSITION: 50}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + +async def test_cover_callbacks( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the callbacks of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + # We find the callback method on the mock client + zone_status_callback: Callable[[dict[int, ZoneStatusZone]], None] = ( + mock_airtouch5_client.zone_status_callbacks[2] + ) + + # Define a method to simply call it + async def _call_zone_status_callback(open_percentage: int) -> None: + zsz = ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=open_percentage, + set_point=None, + has_sensor=False, + temperature=None, + spill_active=False, + is_low_battery=False, + ) + zone_status_callback({1: zsz}) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + + # Partly open + await _call_zone_status_callback(0.7) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 + + # Fully open + await _call_zone_status_callback(1) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 + + # Fully closed + await _call_zone_status_callback(0.0) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_CLOSED + assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 + + # Partly reopened + await _call_zone_status_callback(0.3) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 diff --git a/tests/components/airvisual/conftest.py b/tests/components/airvisual/conftest.py index a82dc0ab78c..cc49b60e0d8 100644 --- a/tests/components/airvisual/conftest.py +++ 
b/tests/components/airvisual/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual import ( CONF_CITY, diff --git a/tests/components/airvisual/test_diagnostics.py b/tests/components/airvisual/test_diagnostics.py index 072e4559705..0253f102c59 100644 --- a/tests/components/airvisual/test_diagnostics.py +++ b/tests/components/airvisual/test_diagnostics.py @@ -1,6 +1,7 @@ """Test AirVisual diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airvisual_pro/conftest.py b/tests/components/airvisual_pro/conftest.py index d25e9821d91..4acf9188889 100644 --- a/tests/components/airvisual_pro/conftest.py +++ b/tests/components/airvisual_pro/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for AirVisual Pro.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.airvisual_pro.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD diff --git a/tests/components/airvisual_pro/test_diagnostics.py b/tests/components/airvisual_pro/test_diagnostics.py index dd87d00be30..372b62eaf38 100644 --- a/tests/components/airvisual_pro/test_diagnostics.py +++ b/tests/components/airvisual_pro/test_diagnostics.py @@ -1,6 +1,7 @@ """Test AirVisual Pro diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airzone/snapshots/test_diagnostics.ambr b/tests/components/airzone/snapshots/test_diagnostics.ambr index adf0176765c..2adf50558e0 100644 --- a/tests/components/airzone/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone/snapshots/test_diagnostics.ambr @@ -267,10 +267,6 @@ 'temp-set': 45, 'temp-unit': 0, }), - 'new-systems': list([ - ]), - 'new-zones': list([ - ]), 'num-systems': 3, 'num-zones': 7, 'systems': dict({ diff --git a/tests/components/airzone/test_climate.py b/tests/components/airzone/test_climate.py index fa972bd3899..0f23c151e0e 100644 --- a/tests/components/airzone/test_climate.py +++ b/tests/components/airzone/test_climate.py @@ -248,7 +248,7 @@ async def test_airzone_create_climates(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = 
hass.states.get("climate.salon") assert state.attributes.get(ATTR_MAX_TEMP) == 25 diff --git a/tests/components/airzone/test_coordinator.py b/tests/components/airzone/test_coordinator.py index 06c77bebb81..583758a6bee 100644 --- a/tests/components/airzone/test_coordinator.py +++ b/tests/components/airzone/test_coordinator.py @@ -8,6 +8,7 @@ from aioairzone.exceptions import ( InvalidMethod, SystemOutOfRange, ) +from freezegun.api import FrozenDateTimeFactory from homeassistant.components.airzone.const import DOMAIN from homeassistant.components.airzone.coordinator import SCAN_INTERVAL @@ -15,7 +16,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .util import CONFIG, HVAC_MOCK, HVAC_VERSION_MOCK +from .util import CONFIG, HVAC_MOCK, HVAC_MOCK_NEW_ZONES, HVAC_VERSION_MOCK from tests.common import MockConfigEntry, async_fire_time_changed @@ -64,3 +65,62 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: state = hass.states.get("sensor.despacho_temperature") assert state.state == STATE_UNAVAILABLE + + +async def test_coordinator_new_devices( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test new devices on coordinator update.""" + + config_entry = MockConfigEntry( + data=CONFIG, + domain=DOMAIN, + unique_id="airzone_unique_id", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_dhw", + side_effect=HotWaterNotAvailable, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac", + return_value=HVAC_MOCK_NEW_ZONES, + ) as mock_hvac, + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_hvac_systems", + side_effect=SystemOutOfRange, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_version", + return_value=HVAC_VERSION_MOCK, + ), + patch( + "homeassistant.components.airzone.AirzoneLocalApi.get_webserver", + side_effect=InvalidMethod, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + mock_hvac.reset_mock() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state is None + + mock_hvac.return_value = HVAC_MOCK + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_hvac.assert_called_once() + + state = hass.states.get("sensor.salon_temperature") + assert state.state == "19.6" + + state = hass.states.get("sensor.dorm_ppal_temperature") + assert state.state == "21.1" diff --git a/tests/components/airzone/test_diagnostics.py b/tests/components/airzone/test_diagnostics.py index 6a03b9f1985..bca75bca778 100644 --- a/tests/components/airzone/test_diagnostics.py +++ b/tests/components/airzone/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import patch from aioairzone.const import RAW_HVAC, RAW_VERSION, RAW_WEBSERVER from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.airzone.const import DOMAIN from homeassistant.core import HomeAssistant @@ -37,4 +38,4 @@ async def test_config_entry_diagnostics( }, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/airzone/test_sensor.py 
b/tests/components/airzone/test_sensor.py index 3d75599d2d2..352994d6313 100644 --- a/tests/components/airzone/test_sensor.py +++ b/tests/components/airzone/test_sensor.py @@ -113,7 +113,7 @@ async def test_airzone_sensors_availability(hass: HomeAssistant) -> None: ), ): async_fire_time_changed(hass, utcnow() + SCAN_INTERVAL) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("sensor.dorm_ppal_temperature") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/airzone/util.py b/tests/components/airzone/util.py index 6e3e0eccc8f..2cdb7a9c6f9 100644 --- a/tests/components/airzone/util.py +++ b/tests/components/airzone/util.py @@ -1,5 +1,6 @@ """Tests for the Airzone integration.""" +from copy import deepcopy from unittest.mock import patch from aioairzone.const import ( @@ -274,6 +275,16 @@ HVAC_MOCK = { ] } +HVAC_MOCK_NEW_ZONES = { + API_SYSTEMS: [ + { + API_DATA: [ + deepcopy(HVAC_MOCK[API_SYSTEMS][0][API_DATA][0]), + ] + } + ] +} + HVAC_DHW_MOCK = { API_DATA: { API_SYSTEM_ID: 0, diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 31065d68a47..26a606bde42 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -114,6 +114,7 @@ 'installation': 'installation1', 'is-connected': True, 'mode': 3, + 'model': 'Aidoo', 'modes': list([ 1, 2, @@ -156,6 +157,7 @@ 'installation': 'installation1', 'is-connected': True, 'mode': 2, + 'model': 'Aidoo Pro', 'modes': list([ 1, 2, @@ -345,6 +347,7 @@ 'temperature-setpoint-max': 30.0, 'temperature-setpoint-min': 15.0, 'temperature-step': 0.5, + 'user-access': 'admin', 'web-servers': list([ 'webserver1', 'webserver2', @@ -370,10 +373,12 @@ '_id': 'error-id', }), ]), + 'firmware': '3.35', 'id': 'system1', 'installation': 'installation1', 'is-connected': True, 'mode': 2, + 'model': 'c6', 'modes': list([ 2, 3, @@ -391,10 +396,12 @@ 'webserver1': dict({ 'available': True, 'connection-date': '2023-05-07T12:55:51.000Z', + 'cpu-usage': 32, 'disconnection-date': '2023-01-01T22:26:55.376Z', 'firmware': '3.44', 'id': 'webserver1', 'installation': 'installation1', + 'memory-free': 42616, 'name': 'WebServer 11:22:33:44:55:66', 'type': 'ws_az', 'wifi-channel': 36, @@ -494,6 +501,8 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, + 'thermostat-fw': '3.52', + 'thermostat-model': 'blueface', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 1, @@ -557,6 +566,11 @@ 'temperature-setpoint-stop-air': 24.0, 'temperature-setpoint-vent-air': 24.0, 'temperature-step': 0.5, + 'thermostat-battery': 54, + 'thermostat-battery-low': False, + 'thermostat-coverage': 76, + 'thermostat-fw': '3.33', + 'thermostat-model': 'thinkradio', 'web-server': 'webserver1', 'ws-connected': True, 'zone': 2, diff --git a/tests/components/airzone_cloud/test_binary_sensor.py b/tests/components/airzone_cloud/test_binary_sensor.py index 8e065821057..bb2d0f78060 100644 --- a/tests/components/airzone_cloud/test_binary_sensor.py +++ b/tests/components/airzone_cloud/test_binary_sensor.py @@ -47,6 +47,9 @@ async def test_airzone_create_binary_sensors(hass: HomeAssistant) -> None: state = hass.states.get("binary_sensor.dormitorio_air_quality_active") assert state.state == STATE_OFF + state = hass.states.get("binary_sensor.dormitorio_battery") + assert state.state == STATE_OFF + 
state = hass.states.get("binary_sensor.dormitorio_floor_demand") assert state.state == STATE_OFF diff --git a/tests/components/airzone_cloud/test_config_flow.py b/tests/components/airzone_cloud/test_config_flow.py index 86a70ced51a..04e253eb494 100644 --- a/tests/components/airzone_cloud/test_config_flow.py +++ b/tests/components/airzone_cloud/test_config_flow.py @@ -15,6 +15,7 @@ from .util import ( GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, WS_ID, + mock_get_device_config, mock_get_device_status, mock_get_webserver, ) @@ -28,6 +29,10 @@ async def test_form(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ) as mock_setup_entry, + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -99,6 +104,10 @@ async def test_installations_list_error(hass: HomeAssistant) -> None: "homeassistant.components.airzone_cloud.async_setup_entry", return_value=True, ), + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff --git a/tests/components/airzone_cloud/test_coordinator.py b/tests/components/airzone_cloud/test_coordinator.py index b4b7afd6086..e2b80e66672 100644 --- a/tests/components/airzone_cloud/test_coordinator.py +++ b/tests/components/airzone_cloud/test_coordinator.py @@ -14,6 +14,7 @@ from .util import ( CONFIG, GET_INSTALLATION_MOCK, GET_INSTALLATIONS_MOCK, + mock_get_device_config, mock_get_device_status, mock_get_webserver, ) @@ -32,6 +33,10 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: config_entry.add_to_hass(hass) with ( + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ) as mock_device_config, patch( "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, @@ -56,11 +61,13 @@ async def test_coordinator_client_connector_error(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + mock_device_config.assert_called() mock_device_status.assert_called() mock_installation.assert_awaited_once() mock_installations.assert_called_once() mock_webserver.assert_called() + mock_device_config.reset_mock() mock_device_status.reset_mock() mock_installation.reset_mock() mock_installations.reset_mock() diff --git a/tests/components/airzone_cloud/test_diagnostics.py b/tests/components/airzone_cloud/test_diagnostics.py index 254dba16b09..d3e23fc7f4b 100644 --- a/tests/components/airzone_cloud/test_diagnostics.py +++ b/tests/components/airzone_cloud/test_diagnostics.py @@ -15,6 +15,7 @@ from aioairzone_cloud.const import ( RAW_WEBSERVERS, ) from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.airzone_cloud.const import DOMAIN from homeassistant.const import CONF_ID @@ -111,4 +112,4 @@ async def test_config_entry_diagnostics( return_value=RAW_DATA_MOCK, ): result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git 
a/tests/components/airzone_cloud/test_sensor.py b/tests/components/airzone_cloud/test_sensor.py index 31fe52f3302..cf291ec23a6 100644 --- a/tests/components/airzone_cloud/test_sensor.py +++ b/tests/components/airzone_cloud/test_sensor.py @@ -21,8 +21,11 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: assert state.state == "20.0" # WebServers - state = hass.states.get("sensor.webserver_11_22_33_44_55_66_signal_strength") - assert state.state == "-56" + state = hass.states.get("sensor.webserver_11_22_33_44_55_66_cpu_usage") + assert state.state == "32" + + state = hass.states.get("sensor.webserver_11_22_33_44_55_66_free_memory") + assert state.state == "42616" state = hass.states.get("sensor.webserver_11_22_33_44_55_67_signal_strength") assert state.state == "-77" @@ -31,6 +34,9 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_air_quality_index") assert state.state == "1" + state = hass.states.get("sensor.dormitorio_battery") + assert state.state == "54" + state = hass.states.get("sensor.dormitorio_pm1") assert state.state == "3" @@ -40,6 +46,9 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.dormitorio_pm10") assert state.state == "3" + state = hass.states.get("sensor.dormitorio_signal_percentage") + assert state.state == "76" + state = hass.states.get("sensor.dormitorio_temperature") assert state.state == "25.0" diff --git a/tests/components/airzone_cloud/util.py b/tests/components/airzone_cloud/util.py index 6e7dad707f1..fb538ea7c8e 100644 --- a/tests/components/airzone_cloud/util.py +++ b/tests/components/airzone_cloud/util.py @@ -3,8 +3,9 @@ from typing import Any from unittest.mock import patch -from aioairzone_cloud.common import OperationMode +from aioairzone_cloud.common import OperationMode, UserAccessType from aioairzone_cloud.const import ( + API_ACCESS_TYPE, API_ACTIVE, API_AIR_ACTIVE, API_AQ_ACTIVE, @@ -23,12 +24,16 @@ from aioairzone_cloud.const import ( API_CELSIUS, API_CONFIG, API_CONNECTION_DATE, + API_CPU_WS, API_DEVICE_ID, API_DEVICES, API_DISCONNECTION_DATE, API_DOUBLE_SET_POINT, API_ERRORS, API_FAH, + API_FREE, + API_FREE_MEM, + API_GENERAL, API_GROUP_ID, API_GROUPS, API_HUMIDITY, @@ -44,6 +49,8 @@ from aioairzone_cloud.const import ( API_POWER, API_POWERFUL_MODE, API_RAD_ACTIVE, + API_RADIO_BATTERY_PERCENT, + API_RADIO_COVERAGE_PERCENT, API_RANGE_MAX_AIR, API_RANGE_MIN_AIR, API_RANGE_SP_MAX_ACS, @@ -79,8 +86,12 @@ from aioairzone_cloud.const import ( API_STAT_SSID, API_STATUS, API_STEP, + API_SYSTEM_FW, API_SYSTEM_NUMBER, + API_SYSTEM_TYPE, API_TANK_TEMP, + API_THERMOSTAT_FW, + API_THERMOSTAT_TYPE, API_TYPE, API_WARNINGS, API_WS_CONNECTED, @@ -184,6 +195,7 @@ GET_INSTALLATIONS_MOCK = { { API_INSTALLATION_ID: CONFIG[CONF_ID], API_NAME: "House", + API_ACCESS_TYPE: UserAccessType.ADMIN, API_WS_IDS: [ WS_ID, WS_ID_AIDOO, @@ -202,6 +214,12 @@ GET_WEBSERVER_MOCK = { API_STAT_AP_MAC: "00:00:00:00:00:00", }, API_STATUS: { + API_CPU_WS: { + API_GENERAL: 32, + }, + API_FREE_MEM: { + API_FREE: 42616, + }, API_IS_CONNECTED: True, API_STAT_QUALITY: 4, API_STAT_RSSI: -56, @@ -245,6 +263,30 @@ GET_WEBSERVER_MOCK_AIDOO_PRO = { } +def mock_get_device_config(device: Device) -> dict[str, Any]: + """Mock API device config.""" + + if device.get_id() == "system1": + return { + API_SYSTEM_FW: "3.35", + API_SYSTEM_TYPE: "c6", + } + if device.get_id() == "zone1": + return { + API_THERMOSTAT_FW: "3.52", + API_THERMOSTAT_TYPE: "blueface", + } + if 
device.get_id() == "zone2": + return { + API_THERMOSTAT_FW: "3.33", + API_THERMOSTAT_TYPE: "thinkradio", + API_RADIO_BATTERY_PERCENT: 54, + API_RADIO_COVERAGE_PERCENT: 76, + } + + return {} + + def mock_get_device_status(device: Device) -> dict[str, Any]: """Mock API device status.""" @@ -470,6 +512,10 @@ async def async_init_integration( config_entry.add_to_hass(hass) with ( + patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_config", + side_effect=mock_get_device_config, + ), patch( + "homeassistant.components.airzone_cloud.AirzoneCloudApi.api_get_device_status", side_effect=mock_get_device_status, diff --git a/tests/components/alarm_control_panel/conftest.py b/tests/components/alarm_control_panel/conftest.py index 620b74dd80e..3e82b935493 100644 --- a/tests/components/alarm_control_panel/conftest.py +++ b/tests/components/alarm_control_panel/conftest.py @@ -1,9 +1,9 @@ """Fixturs for Alarm Control Panel tests.""" +from collections.abc import Generator from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, @@ -129,7 +129,7 @@ async def code_arm_required() -> bool: @pytest.fixture(name="supported_features") -async def lock_supported_features() -> AlarmControlPanelEntityFeature: +async def alarm_control_panel_supported_features() -> AlarmControlPanelEntityFeature: """Return the supported features for the test alarm control panel entity.""" return ( AlarmControlPanelEntityFeature.ARM_AWAY @@ -142,7 +142,7 @@ async def lock_supported_features() -> AlarmControlPanelEntityFeature: @pytest.fixture(name="mock_alarm_control_panel_entity") -async def setup_lock_platform_test_entity( +async def setup_alarm_control_panel_platform_test_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, code_format: CodeFormat | None, diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 15a4bd6d9a1..162149f095b 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -48,6 +48,41 @@ from .test_common import ( from tests.common import async_mock_service +@pytest.mark.parametrize( + ( + "current_activity", + "activity_list", + ), + [ + ("TV", ["TV", "MUSIC", "DVD"]), + ("TV", ["TV"]), + ], +) +async def test_discovery_remote( + hass: HomeAssistant, current_activity: str, activity_list: list[str] +) -> None: + """Test discovery for a remote entity.""" + request = get_new_request("Alexa.Discovery", "Discover") + # setup test device + hass.states.async_set( + "remote.test", + "off", + { + "current_activity": current_activity, + "activity_list": activity_list, + }, + ) + msg = await smart_home.async_handle_message(hass, get_default_config(hass), request) + assert "event" in msg + msg = msg["event"] + assert len(msg["payload"]["endpoints"]) == 1 + endpoint = msg["payload"]["endpoints"][0] + assert endpoint["endpointId"] == "remote#test" + interfaces = {capability["interface"] for capability in endpoint["capabilities"]} + assert "Alexa.PowerController" in interfaces + assert "Alexa.ModeController" in interfaces + + +@pytest.mark.parametrize("adjust", ["-5", "5", "-80"]) async def test_api_adjust_brightness(hass: HomeAssistant, adjust: str) -> None: """Test api adjust brightness process.""" @@ -199,7 +234,6 @@ async def test_api_increase_color_temp( ("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1), ("media_player", "SATELLITE TV", 
["satellite-tv", "game console", None], 0), ("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0), - ("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None), ], ) async def test_api_select_input( @@ -220,18 +254,6 @@ async def test_api_select_input( }, ) - # test where no source matches - if idx is None: - await assert_request_fails( - "Alexa.InputController", - "SelectInput", - "media_player#test", - "media_player.select_source", - hass, - payload={"input": payload}, - ) - return - call, _ = await assert_request_calls_service( "Alexa.InputController", "SelectInput", @@ -243,6 +265,130 @@ async def test_api_select_input( assert call.data["source"] == source_list[idx] +@pytest.mark.parametrize( + ("source_list"), + [(["satellite_tv", "game console"]), ([])], +) +async def test_api_select_input_fails( + hass: HomeAssistant, + source_list: list[Any], +) -> None: + """Test api set input process fails.""" + hass.states.async_set( + "media_player.test", + "off", + { + "friendly_name": "Test media player", + "source": "unknown", + "source_list": source_list, + }, + ) + await assert_request_fails( + "Alexa.InputController", + "SelectInput", + "media_player#test", + "media_player.select_source", + hass, + payload={"input": "BAD DEVICE"}, + ) + + +@pytest.mark.parametrize( + ("activity", "activity_list", "target_activity_index"), + [ + ("TV", ["TV", "MUSIC", "DVD"], 0), + ("MUSIC", ["TV", "MUSIC", "DVD", 1000], 1), + ("DVD", ["TV", "MUSIC", "DVD", None], 2), + ("TV", ["TV"], 0), + ], +) +async def test_api_select_activity( + hass: HomeAssistant, + activity: str, + activity_list: list[str], + target_activity_index: int | None, +) -> None: + """Test api set activity process.""" + hass.states.async_set( + "remote.test", + "off", + { + "current_activity": activity, + "activity_list": activity_list, + }, + ) + call, _ = await assert_request_calls_service( + "Alexa.ModeController", + "SetMode", + "remote#test", + "remote.turn_on", + hass, + payload={"mode": f"activity.{activity}"}, + instance="remote.activity", + ) + assert call.data["activity"] == activity_list[target_activity_index] + + +@pytest.mark.parametrize(("activity_list"), [(["TV", "MUSIC", "DVD"]), ([])]) +async def test_api_select_activity_fails( + hass: HomeAssistant, activity_list: list[str] +) -> None: + """Test api set activity process fails.""" + hass.states.async_set( + "remote.test", + "off", + { + "current_activity": None, + "activity_list": activity_list, + }, + ) + await assert_request_fails( + "Alexa.ModeController", + "SetMode", + "remote#test", + "remote.turn_on", + hass, + payload={"mode": "activity.BAD"}, + instance="remote.activity", + ) + + +@pytest.mark.parametrize( + ( + "current_state", + "target_name", + "target_service", + ), + [ + ("on", "TurnOff", "turn_off"), + ("off", "TurnOn", "turn_on"), + ], +) +async def test_api_remote_set_power_state( + hass: HomeAssistant, + current_state: str, + target_name: str, + target_service: str, +) -> None: + """Test api remote set power state process.""" + hass.states.async_set( + "remote.test", + current_state, + { + "current_activity": ["TV", "MUSIC", "DVD"], + "activity_list": "TV", + }, + ) + + _, msg = await assert_request_calls_service( + "Alexa.PowerController", + target_name, + "remote#test", + f"remote.{target_service}", + hass, + ) + + async def test_report_lock_state(hass: HomeAssistant) -> None: """Test LockController implements lockState property.""" hass.states.async_set("lock.locked", STATE_LOCKED, {}) @@ -619,6 +765,62 @@ async def 
test_report_fan_direction(hass: HomeAssistant) -> None: properties.assert_equal("Alexa.ModeController", "mode", "direction.forward") +async def test_report_remote_power(hass: HomeAssistant) -> None: + """Test ModeController reports remote power state correctly.""" + hass.states.async_set( + "remote.off", + "off", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.on", + "on", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + + properties = await reported_properties(hass, "remote#off") + properties.assert_equal("Alexa.PowerController", "powerState", "OFF") + + properties = await reported_properties(hass, "remote#on") + properties.assert_equal("Alexa.PowerController", "powerState", "ON") + + +async def test_report_remote_activity(hass: HomeAssistant) -> None: + """Test ModeController reports remote activity correctly.""" + hass.states.async_set( + "remote.unknown", + "on", + {"current_activity": "UNKNOWN"}, + ) + hass.states.async_set( + "remote.tv", + "on", + {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.music", + "on", + {"current_activity": "MUSIC", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + hass.states.async_set( + "remote.dvd", + "on", + {"current_activity": "DVD", "activity_list": ["TV", "MUSIC", "DVD"]}, + ) + + properties = await reported_properties(hass, "remote#unknown") + properties.assert_not_has_property("Alexa.ModeController", "mode") + + properties = await reported_properties(hass, "remote#tv") + properties.assert_equal("Alexa.ModeController", "mode", "activity.TV") + + properties = await reported_properties(hass, "remote#music") + properties.assert_equal("Alexa.ModeController", "mode", "activity.MUSIC") + + properties = await reported_properties(hass, "remote#dvd") + properties.assert_equal("Alexa.ModeController", "mode", "activity.DVD") + + async def test_report_cover_range_value(hass: HomeAssistant) -> None: """Test RangeController reports cover position correctly.""" hass.states.async_set( diff --git a/tests/components/amberelectric/conftest.py b/tests/components/amberelectric/conftest.py index 9de865fae6c..ce4073db71b 100644 --- a/tests/components/amberelectric/conftest.py +++ b/tests/components/amberelectric/conftest.py @@ -1,9 +1,9 @@ """Provide common Amber fixtures.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/amberelectric/test_binary_sensor.py b/tests/components/amberelectric/test_binary_sensor.py index 1e5eb572e07..2c1ee22b644 100644 --- a/tests/components/amberelectric/test_binary_sensor.py +++ b/tests/components/amberelectric/test_binary_sensor.py @@ -8,6 +8,7 @@ from unittest.mock import Mock, patch from amberelectric.model.channel import ChannelType from amberelectric.model.current_interval import CurrentInterval from amberelectric.model.interval import SpikeStatus +from amberelectric.model.tariff_information import TariffInformation from dateutil import parser import pytest @@ -111,7 +112,7 @@ async def setup_spike(hass: HomeAssistant) -> AsyncGenerator[Mock]: @pytest.mark.usefixtures("setup_no_spike") def test_no_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert 
sensor assert sensor.state == "off" @@ -122,7 +123,7 @@ def test_no_spike_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_potential_spike") def test_potential_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert sensor assert sensor.state == "off" @@ -133,9 +134,85 @@ def test_potential_spike_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_spike") def test_spike_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("binary_sensor.mock_title_price_spike") assert sensor assert sensor.state == "on" assert sensor.attributes["icon"] == "mdi:power-plug-off" assert sensor.attributes["spike_status"] == "spike" + + +@pytest.fixture +async def setup_inactive_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]: + """Set up general channel.""" + MockConfigEntry( + domain="amberelectric", + data={ + CONF_SITE_NAME: "mock_title", + CONF_API_TOKEN: MOCK_API_TOKEN, + CONF_SITE_ID: GENERAL_ONLY_SITE_ID, + }, + ).add_to_hass(hass) + + instance = Mock() + with patch( + "amberelectric.api.AmberApi.create", + return_value=instance, + ) as mock_update: + general_channel: list[CurrentInterval] = [ + generate_current_interval( + ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") + ), + ] + general_channel[0].tariff_information = TariffInformation(demandWindow=False) + instance.get_current_price = Mock(return_value=general_channel) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + yield mock_update.return_value + + +@pytest.fixture +async def setup_active_demand_window(hass: HomeAssistant) -> AsyncGenerator[Mock]: + """Set up general channel.""" + MockConfigEntry( + domain="amberelectric", + data={ + CONF_SITE_NAME: "mock_title", + CONF_API_TOKEN: MOCK_API_TOKEN, + CONF_SITE_ID: GENERAL_ONLY_SITE_ID, + }, + ).add_to_hass(hass) + + instance = Mock() + with patch( + "amberelectric.api.AmberApi.create", + return_value=instance, + ) as mock_update: + general_channel: list[CurrentInterval] = [ + generate_current_interval( + ChannelType.GENERAL, parser.parse("2021-09-21T08:30:00+10:00") + ), + ] + general_channel[0].tariff_information = TariffInformation(demandWindow=True) + instance.get_current_price = Mock(return_value=general_channel) + assert await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + yield mock_update.return_value + + +@pytest.mark.usefixtures("setup_inactive_demand_window") +def test_inactive_demand_window_sensor(hass: HomeAssistant) -> None: + """Testing the creation of the Amber demand_window sensor.""" + assert len(hass.states.async_all()) == 6 + sensor = hass.states.get("binary_sensor.mock_title_demand_window") + assert sensor + assert sensor.state == "off" + + +@pytest.mark.usefixtures("setup_active_demand_window") +def test_active_demand_window_sensor(hass: HomeAssistant) -> None: + """Testing the creation of the Amber demand_window sensor.""" + assert len(hass.states.async_all()) == 6 + sensor = hass.states.get("binary_sensor.mock_title_demand_window") + assert sensor + assert sensor.state == "on" diff --git a/tests/components/amberelectric/test_sensor.py b/tests/components/amberelectric/test_sensor.py index 
3c0910f0afc..3a5626d14d5 100644 --- a/tests/components/amberelectric/test_sensor.py +++ b/tests/components/amberelectric/test_sensor.py @@ -105,7 +105,7 @@ async def setup_general_and_feed_in(hass: HomeAssistant) -> AsyncGenerator[Mock] async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> None: """Test the General Price sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = hass.states.get("sensor.mock_title_general_price") assert price assert price.state == "0.08" @@ -143,7 +143,7 @@ async def test_general_price_sensor(hass: HomeAssistant, setup_general: Mock) -> @pytest.mark.usefixtures("setup_general_and_controlled_load") async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> None: """Test the Controlled Price sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_price") assert price assert price.state == "0.08" @@ -165,7 +165,7 @@ async def test_general_and_controlled_load_price_sensor(hass: HomeAssistant) -> @pytest.mark.usefixtures("setup_general_and_feed_in") async def test_general_and_feed_in_price_sensor(hass: HomeAssistant) -> None: """Test the Feed In sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_price") assert price assert price.state == "-0.08" @@ -188,7 +188,7 @@ async def test_general_forecast_sensor( hass: HomeAssistant, setup_general: Mock ) -> None: """Test the General Forecast sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = hass.states.get("sensor.mock_title_general_forecast") assert price assert price.state == "0.09" @@ -230,7 +230,7 @@ async def test_general_forecast_sensor( @pytest.mark.usefixtures("setup_general_and_controlled_load") async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None: """Test the Controlled Load Forecast sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_forecast") assert price assert price.state == "0.09" @@ -254,7 +254,7 @@ async def test_controlled_load_forecast_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general_and_feed_in") async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None: """Test the Feed In Forecast sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_forecast") assert price assert price.state == "-0.09" @@ -278,7 +278,7 @@ async def test_feed_in_forecast_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general") def test_renewable_sensor(hass: HomeAssistant) -> None: """Testing the creation of the Amber renewables sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 sensor = hass.states.get("sensor.mock_title_renewables") assert sensor assert sensor.state == "51" @@ -287,7 +287,7 @@ def test_renewable_sensor(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_general") def test_general_price_descriptor_descriptor_sensor(hass: HomeAssistant) -> None: """Test the General Price Descriptor sensor.""" - assert len(hass.states.async_all()) == 5 + assert len(hass.states.async_all()) == 6 price = 
hass.states.get("sensor.mock_title_general_price_descriptor") assert price assert price.state == "extremely_low" @@ -298,7 +298,7 @@ def test_general_and_controlled_load_price_descriptor_sensor( hass: HomeAssistant, ) -> None: """Test the Controlled Price Descriptor sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_controlled_load_price_descriptor") assert price assert price.state == "extremely_low" @@ -307,7 +307,7 @@ def test_general_and_controlled_load_price_descriptor_sensor( @pytest.mark.usefixtures("setup_general_and_feed_in") def test_general_and_feed_in_price_descriptor_sensor(hass: HomeAssistant) -> None: """Test the Feed In Price Descriptor sensor.""" - assert len(hass.states.async_all()) == 8 + assert len(hass.states.async_all()) == 9 price = hass.states.get("sensor.mock_title_feed_in_price_descriptor") assert price assert price.state == "extremely_low" diff --git a/tests/components/ambient_network/conftest.py b/tests/components/ambient_network/conftest.py index 2900f8ae5fe..9fc001252a0 100644 --- a/tests/components/ambient_network/conftest.py +++ b/tests/components/ambient_network/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Ambient Weather Network integration tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch from aioambient import OpenAPI import pytest -from typing_extensions import Generator from homeassistant.components import ambient_network from homeassistant.core import HomeAssistant diff --git a/tests/components/ambient_station/conftest.py b/tests/components/ambient_station/conftest.py index e4f067108a5..160c05ad996 100644 --- a/tests/components/ambient_station/conftest.py +++ b/tests/components/ambient_station/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for Ambient PWS.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/ambient_station/test_diagnostics.py b/tests/components/ambient_station/test_diagnostics.py index 05161ba32cd..82db72eb9ca 100644 --- a/tests/components/ambient_station/test_diagnostics.py +++ b/tests/components/ambient_station/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Ambient PWS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.ambient_station import AmbientStationConfigEntry from homeassistant.core import HomeAssistant @@ -20,7 +21,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" ambient = config_entry.runtime_data ambient.stations = data_station - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/analytics/test_analytics.py b/tests/components/analytics/test_analytics.py index 60882cda874..28272cd8866 100644 --- a/tests/components/analytics/test_analytics.py +++ b/tests/components/analytics/test_analytics.py @@ -19,7 +19,6 @@ from homeassistant.components.analytics.const import ( ATTR_STATISTICS, ATTR_USAGE, ) -from homeassistant.components.recorder import Recorder from 
homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,7 +35,7 @@ MOCK_VERSION_NIGHTLY = "1970.1.0.dev19700101" @pytest.fixture(autouse=True) -def uuid_mock() -> Generator[Any, Any, None]: +def uuid_mock() -> Generator[None]: """Mock the UUID.""" with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex_mock: hex_mock.return_value = MOCK_UUID @@ -44,7 +43,7 @@ def uuid_mock() -> Generator[Any, Any, None]: @pytest.fixture(autouse=True) -def ha_version_mock() -> Generator[Any, Any, None]: +def ha_version_mock() -> Generator[None]: """Mock the core version.""" with patch( "homeassistant.components.analytics.analytics.HA_VERSION", @@ -54,7 +53,7 @@ def ha_version_mock() -> Generator[Any, Any, None]: @pytest.fixture -def installation_type_mock() -> Generator[Any, Any, None]: +def installation_type_mock() -> Generator[None]: """Mock the async_get_system_info.""" with patch( "homeassistant.components.analytics.analytics.async_get_system_info", @@ -160,11 +159,11 @@ async def test_failed_to_send_raises( assert "Error sending analytics" in caplog.text +@pytest.mark.usefixtures("installation_type_mock") async def test_send_base( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -231,11 +230,11 @@ async def test_send_base_with_supervisor( assert snapshot == submitted_data +@pytest.mark.usefixtures("installation_type_mock") async def test_send_usage( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send usage preferences are defined.""" @@ -331,11 +330,11 @@ async def test_send_usage_with_supervisor( assert snapshot == submitted_data +@pytest.mark.usefixtures("installation_type_mock") async def test_send_statistics( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics preferences are defined.""" @@ -382,12 +381,11 @@ async def test_send_statistics_one_integration_fails( assert post_call[2]["integration_count"] == 0 -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_statistics_disabled_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with disabled integration.""" @@ -420,12 +418,11 @@ async def test_send_statistics_disabled_integration( assert snapshot == submitted_data -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_statistics_ignored_integration( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send statistics with ignored integration.""" @@ -566,12 +563,11 @@ async def test_reusing_uuid( assert analytics.uuid == "NOT_MOCK_UUID" -@pytest.mark.usefixtures("enable_custom_integrations") 
+@pytest.mark.usefixtures("enable_custom_integrations", "installation_type_mock") async def test_custom_integrations( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test sending custom integrations.""" @@ -651,12 +647,11 @@ async def test_nightly_endpoint( assert str(payload[1]) == ANALYTICS_ENDPOINT_URL -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_with_no_energy( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -688,12 +683,11 @@ async def test_send_with_no_energy( assert snapshot == submitted_data -@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") async def test_send_with_no_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -720,12 +714,11 @@ async def test_send_with_no_energy_config( ) -@pytest.mark.usefixtures("recorder_mock", "mock_hass_config") +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock", "mock_hass_config") async def test_send_with_energy_config( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send base preferences are defined.""" @@ -752,12 +745,11 @@ async def test_send_with_energy_config( ) -@pytest.mark.usefixtures("mock_hass_config") +@pytest.mark.usefixtures("installation_type_mock", "mock_hass_config") async def test_send_usage_with_certificate( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test send usage preferences with certificate.""" @@ -779,12 +771,11 @@ async def test_send_usage_with_certificate( assert snapshot == submitted_data +@pytest.mark.usefixtures("recorder_mock", "installation_type_mock") async def test_send_with_recorder( - recorder_mock: Recorder, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test recorder information.""" @@ -849,11 +840,11 @@ async def test_timeout_while_sending( assert "Timeout sending analytics" in caplog.text +@pytest.mark.usefixtures("installation_type_mock") async def test_not_check_config_entries_if_yaml( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, aioclient_mock: AiohttpClientMocker, - installation_type_mock: Generator[Any, Any, None], snapshot: SnapshotAssertion, ) -> None: """Test skip config entry check if defined in yaml.""" diff --git a/tests/components/analytics_insights/conftest.py b/tests/components/analytics_insights/conftest.py index 75d47c41f4e..fcdda95e9bd 100644 --- a/tests/components/analytics_insights/conftest.py +++ b/tests/components/analytics_insights/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Homeassistant Analytics tests.""" +from 
collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from python_homeassistant_analytics import CurrentAnalytics from python_homeassistant_analytics.models import CustomIntegration, Integration -from typing_extensions import Generator from homeassistant.components.analytics_insights.const import ( CONF_TRACKED_CUSTOM_INTEGRATIONS, diff --git a/tests/components/androidtv/conftest.py b/tests/components/androidtv/conftest.py index befb9db7a8c..a075ed66079 100644 --- a/tests/components/androidtv/conftest.py +++ b/tests/components/androidtv/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Android TV integration tests.""" +from collections.abc import Generator from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from . import patchers diff --git a/tests/components/androidtv/patchers.py b/tests/components/androidtv/patchers.py index 90a13523ebe..1c32e1770e0 100644 --- a/tests/components/androidtv/patchers.py +++ b/tests/components/androidtv/patchers.py @@ -37,7 +37,7 @@ class AdbDeviceTcpAsyncFake: """Try to connect to a device.""" raise NotImplementedError - async def shell(self, cmd, *args, **kwargs): + async def shell(self, cmd, *args, **kwargs) -> bytes | str | None: """Send an ADB shell command.""" return None diff --git a/tests/components/androidtv_remote/conftest.py b/tests/components/androidtv_remote/conftest.py index aa5583927d1..05e40991ff9 100644 --- a/tests/components/androidtv_remote/conftest.py +++ b/tests/components/androidtv_remote/conftest.py @@ -1,10 +1,9 @@ """Fixtures for the Android TV Remote integration tests.""" -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.androidtv_remote.const import DOMAIN from homeassistant.config_entries import ConfigEntryState diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index ad7c049e32f..46678f18fd3 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -345,7 +345,7 @@ async def test_browse_media( ) response = await client.receive_json() assert response["success"] - assert { + assert response["result"] == { "title": "Applications", "media_class": "directory", "media_content_type": "apps", @@ -377,7 +377,7 @@ async def test_browse_media( "thumbnail": "", }, ], - } == response["result"] + } async def test_media_player_connection_closed( diff --git a/tests/components/aosmith/conftest.py b/tests/components/aosmith/conftest.py index d67ae1ea627..7efbe0c58b2 100644 --- a/tests/components/aosmith/conftest.py +++ b/tests/components/aosmith/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for the A. O. 
Smith tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from py_aosmith import AOSmithAPIClient @@ -14,7 +15,6 @@ from py_aosmith.models import ( SupportedOperationModeInfo, ) import pytest -from typing_extensions import Generator from homeassistant.components.aosmith.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/aosmith/snapshots/test_device.ambr b/tests/components/aosmith/snapshots/test_device.ambr index d563090ce9d..dec33a92fe2 100644 --- a/tests/components/aosmith/snapshots/test_device.ambr +++ b/tests/components/aosmith/snapshots/test_device.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'A. O. Smith', 'model': 'HPTS-50 200 202172000', + 'model_id': None, 'name': 'My water heater', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/aosmith/test_sensor.py b/tests/components/aosmith/test_sensor.py index a77e4e4576d..1dc632b5e84 100644 --- a/tests/components/aosmith/test_sensor.py +++ b/tests/components/aosmith/test_sensor.py @@ -1,10 +1,10 @@ """Tests for the sensor platform of the A. O. Smith integration.""" +from collections.abc import AsyncGenerator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[list[str]]: +async def platforms() -> AsyncGenerator[None]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SENSOR]): yield diff --git a/tests/components/aosmith/test_water_heater.py b/tests/components/aosmith/test_water_heater.py index ab4a4a33bca..69ad8004fc2 100644 --- a/tests/components/aosmith/test_water_heater.py +++ b/tests/components/aosmith/test_water_heater.py @@ -1,11 +1,11 @@ """Tests for the water heater platform of the A. O. Smith integration.""" +from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch from py_aosmith.models import OperationMode import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator from homeassistant.components.water_heater import ( ATTR_AWAY_MODE, @@ -29,7 +29,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def platforms() -> AsyncGenerator[list[str]]: +async def platforms() -> AsyncGenerator[None]: """Return the platforms to be loaded for this test.""" with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.WATER_HEATER]): yield diff --git a/tests/components/apcupsd/test_sensor.py b/tests/components/apcupsd/test_sensor.py index 0c7d174a5e8..0fe7f12ad27 100644 --- a/tests/components/apcupsd/test_sensor.py +++ b/tests/components/apcupsd/test_sensor.py @@ -15,6 +15,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE, + STATE_UNKNOWN, UnitOfElectricPotential, UnitOfPower, UnitOfTime, @@ -25,7 +26,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util import slugify from homeassistant.util.dt import utcnow -from . import MOCK_STATUS, async_init_integration +from . 
import MOCK_MINIMAL_STATUS, MOCK_STATUS, async_init_integration from tests.common import async_fire_time_changed @@ -237,3 +238,34 @@ async def test_multiple_manual_update_entity(hass: HomeAssistant) -> None: blocking=True, ) assert mock_request_status.call_count == 1 + + +async def test_sensor_unknown(hass: HomeAssistant) -> None: + """Test if our integration can properly mark certain sensors as unknown when they become so.""" + await async_init_integration(hass, status=MOCK_MINIMAL_STATUS) + + assert hass.states.get("sensor.mode").state == MOCK_MINIMAL_STATUS["UPSMODE"] + # Last self test sensor should be added even if our status does not report it initially (it is + # a sensor that appears only after a periodic or manual self test is performed). + assert hass.states.get("sensor.last_self_test") is not None + assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN + + # Simulate an event (a self test) such that the "LASTSTEST" field is reported; the state of + # the sensor should be properly updated with the corresponding value. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS | { + "LASTSTEST": "1970-01-01 00:00:00 0000" + } + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + assert hass.states.get("sensor.last_self_test").state == "1970-01-01 00:00:00 0000" + + # Simulate another event (e.g., daemon restart) such that "LASTSTEST" is no longer reported. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + # The state should become unknown again. + assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN diff --git a/tests/components/api/test_init.py b/tests/components/api/test_init.py index a1453315dbf..abce262fd12 100644 --- a/tests/components/api/test_init.py +++ b/tests/components/api/test_init.py @@ -3,6 +3,7 @@ import asyncio from http import HTTPStatus import json +from typing import Any from unittest.mock import patch from aiohttp import ServerDisconnectedError, web @@ -355,6 +356,67 @@ async def test_api_call_service_with_data( assert state["attributes"] == {"data": 1} + + +SERVICE_DICT = {"changed_states": [], "service_response": {"foo": "bar"}} +RESP_REQUIRED = { + "message": ( + "Service call requires responses but caller did not ask for " + "responses. Add ?return_response to query parameters." + ) +} +RESP_UNSUPPORTED = { + "message": "Service does not support responses. Remove return_response from request." 
+} + + +@pytest.mark.parametrize( + ( + "supports_response", + "requested_response", + "expected_number_of_service_calls", + "expected_status", + "expected_response", + ), + [ + (ha.SupportsResponse.ONLY, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.ONLY, False, 0, HTTPStatus.BAD_REQUEST, RESP_REQUIRED), + (ha.SupportsResponse.OPTIONAL, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.OPTIONAL, False, 1, HTTPStatus.OK, []), + (ha.SupportsResponse.NONE, True, 0, HTTPStatus.BAD_REQUEST, RESP_UNSUPPORTED), + (ha.SupportsResponse.NONE, False, 1, HTTPStatus.OK, []), + ], +) +async def test_api_call_service_returns_response_requested_response( + hass: HomeAssistant, + mock_api_client: TestClient, + supports_response: ha.SupportsResponse, + requested_response: bool, + expected_number_of_service_calls: int, + expected_status: int, + expected_response: Any, +) -> None: + """Test if the API allows us to call a service.""" + test_value = [] + + @ha.callback + def listener(service_call): + """Record that our service got called.""" + test_value.append(1) + return {"foo": "bar"} + + hass.services.async_register( + "test_domain", "test_service", listener, supports_response=supports_response + ) + + resp = await mock_api_client.post( + "/api/services/test_domain/test_service" + + ("?return_response" if requested_response else "") + ) + assert resp.status == expected_status + await hass.async_block_till_done() + assert len(test_value) == expected_number_of_service_calls + assert await resp.json() == expected_response + + async def test_api_call_service_client_closed( hass: HomeAssistant, mock_api_client: TestClient ) -> None: @@ -770,4 +832,43 @@ async def test_api_core_state(hass: HomeAssistant, mock_api_client: TestClient) resp = await mock_api_client.get("/api/core/state") assert resp.status == HTTPStatus.OK json = await resp.json() - assert json["state"] == "RUNNING" + assert json == { + "state": "RUNNING", + "recorder_state": {"migration_in_progress": False, "migration_is_live": False}, + } + + +@pytest.mark.parametrize( + ("migration_in_progress", "migration_is_live"), + [ + (False, False), + (False, True), + (True, False), + (True, True), + ], +) +async def test_api_core_state_recorder_migrating( + hass: HomeAssistant, + mock_api_client: TestClient, + migration_in_progress: bool, + migration_is_live: bool, +) -> None: + """Test getting core status.""" + with ( + patch( + "homeassistant.helpers.recorder.async_migration_in_progress", + return_value=migration_in_progress, + ), + patch( + "homeassistant.helpers.recorder.async_migration_is_live", + return_value=migration_is_live, + ), + ): + resp = await mock_api_client.get("/api/core/state") + assert resp.status == HTTPStatus.OK + json = await resp.json() + expected_recorder_state = { + "migration_in_progress": migration_in_progress, + "migration_is_live": migration_is_live, + } + assert json == {"state": "RUNNING", "recorder_state": expected_recorder_state} diff --git a/tests/components/apple_tv/conftest.py b/tests/components/apple_tv/conftest.py index 36061924db5..78982a8d51c 100644 --- a/tests/components/apple_tv/conftest.py +++ b/tests/components/apple_tv/conftest.py @@ -1,12 +1,12 @@ """Fixtures for component.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pyatv import conf from pyatv.const import PairingRequirement, Protocol from pyatv.support import http import pytest -from typing_extensions import Generator from .common import MockPairingHandler, airplay_service, 
create_conf, mrp_service diff --git a/tests/components/apple_tv/test_config_flow.py b/tests/components/apple_tv/test_config_flow.py index b8f49e7c8f5..f37042a6f50 100644 --- a/tests/components/apple_tv/test_config_flow.py +++ b/tests/components/apple_tv/test_config_flow.py @@ -1,12 +1,12 @@ """Test config flow.""" +from collections.abc import Generator from ipaddress import IPv4Address, ip_address from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from pyatv import exceptions from pyatv.const import PairingRequirement, Protocol import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index c427b1d07e0..e6fdf568bcc 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow from homeassistant.components.application_credentials import ( diff --git a/tests/components/aprilaire/test_config_flow.py b/tests/components/aprilaire/test_config_flow.py index c9cba2b3fd6..e4b7c167256 100644 --- a/tests/components/aprilaire/test_config_flow.py +++ b/tests/components/aprilaire/test_config_flow.py @@ -104,7 +104,7 @@ async def test_config_flow_data(client: AprilaireClient, hass: HomeAssistant) -> abort_if_unique_id_configured_mock.assert_called_once() create_entry_mock.assert_called_once_with( - title="Aprilaire", + title="AprilAire", data={ "host": "localhost", "port": 7000, diff --git a/tests/components/aprs/test_device_tracker.py b/tests/components/aprs/test_device_tracker.py index 4cdff41598f..4142195b0b9 100644 --- a/tests/components/aprs/test_device_tracker.py +++ b/tests/components/aprs/test_device_tracker.py @@ -1,11 +1,11 @@ """Test APRS device tracker.""" +from collections.abc import Generator from unittest.mock import MagicMock, Mock, patch import aprslib from aprslib import IS import pytest -from typing_extensions import Generator from homeassistant.components.aprs import device_tracker from homeassistant.core import HomeAssistant diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index cd04346c070..c191c7ca2dc 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the APsystems Local API tests.""" -from unittest.mock import AsyncMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData +from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData, Status import pytest -from typing_extensions import Generator from homeassistant.components.apsystems.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_apsystems() -> Generator[AsyncMock, None, None]: +def mock_apsystems() -> Generator[MagicMock]: """Mock APSystems lib.""" with ( patch( @@ -52,6 +52,7 @@ def mock_apsystems() -> Generator[AsyncMock, None, None]: e2=6.0, te2=7.0, ) + 
mock_api.get_device_power_status.return_value = Status.normal yield mock_api diff --git a/tests/components/apsystems/snapshots/test_switch.ambr b/tests/components/apsystems/snapshots/test_switch.ambr new file mode 100644 index 00000000000..6daa9fd6e14 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_inverter_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_inverter_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_status', + 'unique_id': 'MY_SERIAL_NUMBER_inverter_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_inverter_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Inverter status', + }), + 'context': , + 'entity_id': 'switch.mock_title_inverter_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/test_config_flow.py b/tests/components/apsystems/test_config_flow.py index e3fcdf67dcc..3d78524a529 100644 --- a/tests/components/apsystems/test_config_flow.py +++ b/tests/components/apsystems/test_config_flow.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from homeassistant.components.apsystems.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_IP_ADDRESS +from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -27,6 +27,24 @@ async def test_form_create_success( assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" +async def test_form_create_success_custom_port( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_apsystems: AsyncMock +) -> None: + """Test we handle creating with custom port with success.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_IP_ADDRESS: "127.0.0.1", + CONF_PORT: 8042, + }, + ) + assert result["result"].unique_id == "MY_SERIAL_NUMBER" + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" + assert result["data"].get(CONF_PORT) == 8042 + + async def test_form_cannot_connect_and_recover( hass: HomeAssistant, mock_apsystems: AsyncMock, mock_setup_entry: AsyncMock ) -> None: @@ -57,6 +75,33 @@ async def test_form_cannot_connect_and_recover( assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" +async def test_form_cannot_connect_and_recover_custom_port( + hass: HomeAssistant, mock_apsystems: AsyncMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle cannot connect error but recovering with custom port.""" + + mock_apsystems.get_device_info.side_effect = TimeoutError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_IP_ADDRESS: 
"127.0.0.2", CONF_PORT: 8042}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_apsystems.get_device_info.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_IP_ADDRESS: "127.0.0.1", CONF_PORT: 8042}, + ) + assert result2["result"].unique_id == "MY_SERIAL_NUMBER" + assert result2.get("type") is FlowResultType.CREATE_ENTRY + assert result2["data"].get(CONF_IP_ADDRESS) == "127.0.0.1" + assert result2["data"].get(CONF_PORT) == 8042 + + async def test_form_unique_id_already_configured( hass: HomeAssistant, mock_setup_entry: AsyncMock, diff --git a/tests/components/apsystems/test_switch.py b/tests/components/apsystems/test_switch.py new file mode 100644 index 00000000000..afd889fe958 --- /dev/null +++ b/tests/components/apsystems/test_switch.py @@ -0,0 +1,31 @@ +"""Test the APSystem switch module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.SWITCH], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) diff --git a/tests/components/aquacell/conftest.py b/tests/components/aquacell/conftest.py index db27f51dc03..f5a741ceed8 100644 --- a/tests/components/aquacell/conftest.py +++ b/tests/components/aquacell/conftest.py @@ -2,7 +2,7 @@ from collections.abc import Generator from datetime import datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from aioaquacell import AquacellApi, Softener import pytest @@ -19,7 +19,7 @@ from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.aquacell.async_setup_entry", return_value=True @@ -28,7 +28,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_aquacell_api() -> Generator[AsyncMock, None, None]: +def mock_aquacell_api() -> Generator[MagicMock]: """Build a fixture for the Aquacell API that authenticates successfully and returns a single softener.""" with ( patch( diff --git a/tests/components/arcam_fmj/conftest.py b/tests/components/arcam_fmj/conftest.py index 66850933cc7..ca4af1b00a3 100644 --- a/tests/components/arcam_fmj/conftest.py +++ b/tests/components/arcam_fmj/conftest.py @@ -1,11 +1,11 @@ """Tests for the arcam_fmj component.""" +from collections.abc import AsyncGenerator from unittest.mock import Mock, patch from arcam.fmj.client import Client from arcam.fmj.state import State import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.arcam_fmj.const import DEFAULT_NAME from homeassistant.components.arcam_fmj.media_player import ArcamFmj @@ -99,6 +99,7 @@ async def player_setup_fixture( return state_1 if zone == 2: 
return state_2 + raise ValueError(f"Unknown player zone: {zone}") await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/arcam_fmj/test_config_flow.py b/tests/components/arcam_fmj/test_config_flow.py index 26e93354900..60c68c5e102 100644 --- a/tests/components/arcam_fmj/test_config_flow.py +++ b/tests/components/arcam_fmj/test_config_flow.py @@ -1,15 +1,14 @@ """Tests for the Arcam FMJ config flow module.""" +from collections.abc import Generator from dataclasses import replace from unittest.mock import AsyncMock, MagicMock, patch from arcam.fmj.client import ConnectionFailed import pytest -from typing_extensions import Generator from homeassistant.components import ssdp -from homeassistant.components.arcam_fmj.config_flow import get_entry_client -from homeassistant.components.arcam_fmj.const import DOMAIN, DOMAIN_DATA_ENTRIES +from homeassistant.components.arcam_fmj.const import DOMAIN from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant @@ -215,12 +214,3 @@ async def test_user_wrong( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == f"Arcam FMJ ({MOCK_HOST})" assert result["result"].unique_id is None - - -async def test_get_entry_client(hass: HomeAssistant) -> None: - """Test helper for configuration.""" - entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG_ENTRY, title=MOCK_NAME, unique_id=MOCK_UUID - ) - hass.data[DOMAIN_DATA_ENTRIES] = {entry.entry_id: "dummy"} - assert get_entry_client(hass, entry) == "dummy" diff --git a/tests/components/arve/conftest.py b/tests/components/arve/conftest.py index 40a5f98291b..8fc35e37000 100644 --- a/tests/components/arve/conftest.py +++ b/tests/components/arve/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Arve tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from asyncarve import ArveCustomer, ArveDevices, ArveSensPro, ArveSensProData import pytest -from typing_extensions import Generator from homeassistant.components.arve.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index f19e70a8ec1..b2eca1e7ce1 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -2,16 +2,21 @@ from __future__ import annotations -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from pathlib import Path from typing import Any from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components import stt, tts, wake_word from homeassistant.components.assist_pipeline import DOMAIN, select as assist_select +from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( PipelineData, PipelineStorageCollection, @@ -34,6 +39,8 @@ from tests.common import ( _TRANSCRIPT = "test transcript" +BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS + @pytest.fixture(autouse=True) def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: @@ -463,3 +470,8 @@ def pipeline_data(hass: HomeAssistant, init_components) -> PipelineData: def pipeline_storage(pipeline_data) -> PipelineStorageCollection: """Return pipeline 
storage collection.""" return pipeline_data.pipeline_store + + +def make_10ms_chunk(header: bytes) -> bytes: + """Return 10ms of zeros with the given header.""" + return header + bytes(BYTES_PER_CHUNK - len(header)) diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 2c506215c68..e5ae18d28f2 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -440,7 +440,7 @@ # --- # name: test_device_capture_override.2 dict({ - 'audio': 'Y2h1bmsx', + 'audio': 'Y2h1bmsxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=', 'channels': 1, 'rate': 16000, 'type': 'audio', @@ -663,7 +663,10 @@ # name: test_stt_stream_failed.2 None # --- -# name: test_text_only_pipeline +# name: test_text_only_pipeline.3 + None +# --- +# name: test_text_only_pipeline[extra_msg0] dict({ 'language': 'en', 'pipeline': , @@ -673,7 +676,7 @@ }), }) # --- -# name: test_text_only_pipeline.1 +# name: test_text_only_pipeline[extra_msg0].1 dict({ 'conversation_id': 'mock-conversation-id', 'device_id': 'mock-device-id', @@ -682,7 +685,7 @@ 'language': 'en', }) # --- -# name: test_text_only_pipeline.2 +# name: test_text_only_pipeline[extra_msg0].2 dict({ 'intent_output': dict({ 'conversation_id': None, @@ -704,7 +707,51 @@ }), }) # --- -# name: test_text_only_pipeline.3 +# name: test_text_only_pipeline[extra_msg0].3 + None +# --- +# name: test_text_only_pipeline[extra_msg1] + dict({ + 'language': 'en', + 'pipeline': , + 'runner_data': dict({ + 'stt_binary_handler_id': None, + 'timeout': 300, + }), + }) +# --- +# name: test_text_only_pipeline[extra_msg1].1 + dict({ + 'conversation_id': 'mock-conversation-id', + 'device_id': 'mock-device-id', + 'engine': 'conversation.home_assistant', + 'intent_input': 'Are the lights on?', + 'language': 'en', + }) +# --- +# name: test_text_only_pipeline[extra_msg1].2 + dict({ + 'intent_output': dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any area called are', + }), + }), + }), + }), + }) +# --- +# name: test_text_only_pipeline[extra_msg1].3 None # --- # name: test_text_pipeline_timeout diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index f9b91af3bf1..4206a288331 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -13,6 +13,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) @@ -20,16 +21,16 @@ from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component from .conftest import ( + BYTES_ONE_SECOND, MockSttProvider, MockSttProviderEntity, MockTTSProvider, MockWakeWordEntity, + 
make_10ms_chunk, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -BYTES_ONE_SECOND = 16000 * 2 - def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" @@ -58,8 +59,8 @@ async def test_pipeline_from_audio_stream_auto( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -75,13 +76,13 @@ async def test_pipeline_from_audio_stream_auto( channel=stt.AudioChannels.CHANNEL_MONO, ), stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( @@ -100,8 +101,8 @@ async def test_pipeline_from_audio_stream_legacy( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -140,13 +141,13 @@ async def test_pipeline_from_audio_stream_legacy( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_entity( @@ -165,8 +166,8 @@ async def test_pipeline_from_audio_stream_entity( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -205,13 +206,13 @@ async def test_pipeline_from_audio_stream_entity( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot - assert mock_stt_provider_entity.received == [b"part1", b"part2"] + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_no_stt( @@ -230,8 +231,8 @@ async def test_pipeline_from_audio_stream_no_stt( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline without stt support @@ -271,9 +272,7 @@ async def test_pipeline_from_audio_stream_no_stt( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - 
), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert not events @@ -293,8 +292,8 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Try to use the created pipeline @@ -335,24 +334,25 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - bytes_per_chunk = int(0.01 * BYTES_ONE_SECOND) + samples_per_chunk = 160 # 10ms @ 16Khz + bytes_per_chunk = samples_per_chunk * 2 # 16-bit async def audio_data(): - # 1 second in 10 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_1): yield wake_chunk_1[i : i + bytes_per_chunk] i += bytes_per_chunk - # 1 second in 30 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_2): yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - yield b"wake word!" - yield b"part1" - yield b"part2" + for header in (b"wake word!", b"part1", b"part2"): + yield make_10ms_chunk(header) + yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -372,9 +372,7 @@ async def test_pipeline_from_audio_stream_wake_word( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -390,7 +388,9 @@ async def test_pipeline_from_audio_stream_wake_word( ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider.received[-3:] == [b"queued audio", b"part1", b"part2"] + assert mock_stt_provider.received[-3] == b"queued audio" + assert mock_stt_provider.received[-2].startswith(b"part1") + assert mock_stt_provider.received[-1].startswith(b"part2") async def test_pipeline_save_audio( @@ -413,13 +413,11 @@ async def test_pipeline_save_audio( pipeline = assist_pipeline.async_get_pipeline(hass) events: list[assist_pipeline.PipelineEvent] = [] - # Pad out to an even number of bytes since these "samples" will be saved - # as 16-bit values. 
async def audio_data(): - yield b"wake word_" + yield make_10ms_chunk(b"wake word") # queued audio - yield b"part1_" - yield b"part2_" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -438,9 +436,7 @@ async def test_pipeline_save_audio( pipeline_id=pipeline.id, start_stage=assist_pipeline.PipelineStage.WAKE_WORD, end_stage=assist_pipeline.PipelineStage.STT, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) pipeline_dirs = list(temp_dir.iterdir()) @@ -464,12 +460,16 @@ async def test_pipeline_save_audio( # Verify wake file with wave.open(str(wake_file), "rb") as wake_wav: wake_data = wake_wav.readframes(wake_wav.getnframes()) - assert wake_data == b"wake word_" + assert wake_data.startswith(b"wake word") # Verify stt file with wave.open(str(stt_file), "rb") as stt_wav: stt_data = stt_wav.readframes(stt_wav.getnframes()) - assert stt_data == b"queued audiopart1_part2_" + assert stt_data.startswith(b"queued audio") + stt_data = stt_data[len(b"queued audio") :] + assert stt_data.startswith(b"part1") + stt_data = stt_data[BYTES_PER_CHUNK:] + assert stt_data.startswith(b"part2") async def test_pipeline_saved_audio_with_device_id( @@ -652,10 +652,10 @@ async def test_wake_word_detection_aborted( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"silence!" - yield b"wake word!" - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"silence!") + yield make_10ms_chunk(b"wake word!") + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" pipeline_store = pipeline_data.pipeline_store @@ -685,9 +685,7 @@ async def test_wake_word_detection_aborted( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ), ) await pipeline_input.validate() diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 3e1e99412d8..45a661c0f07 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -1,10 +1,10 @@ """Websocket tests for Voice Assistant integration.""" +from collections.abc import AsyncGenerator from typing import Any from unittest.mock import ANY, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import conversation from homeassistant.components.assist_pipeline.const import DOMAIN diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index 139ae915263..17cb73a9139 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -1,11 +1,9 @@ """Tests for voice command segmenter.""" import itertools as it -from unittest.mock import patch from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, - VoiceActivityDetector, VoiceCommandSegmenter, chunk_samples, ) @@ -44,59 +42,41 @@ def test_speech() -> None: def test_audio_buffer() -> None: """Test audio buffer wrapping.""" - class DisabledVad(VoiceActivityDetector): - def is_speech(self, chunk): - return False + samples_per_chunk = 160 # 10 ms + bytes_per_chunk = samples_per_chunk * 2 + leftover_buffer = 
AudioBuffer(bytes_per_chunk) - @property - def samples_per_chunk(self): - return 160 # 10 ms + # Partially fill audio buffer + half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) + chunks = list(chunk_samples(half_chunk, bytes_per_chunk, leftover_buffer)) - vad = DisabledVad() - bytes_per_chunk = vad.samples_per_chunk * 2 - vad_buffer = AudioBuffer(bytes_per_chunk) - segmenter = VoiceCommandSegmenter() + assert not chunks + assert leftover_buffer.bytes() == half_chunk - with patch.object(vad, "is_speech", return_value=False) as mock_process: - # Partially fill audio buffer - half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) - segmenter.process_with_vad(half_chunk, vad, vad_buffer) + # Fill and wrap with 1/4 chunk left over + three_quarters_chunk = bytes( + it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) + ) + chunks = list(chunk_samples(three_quarters_chunk, bytes_per_chunk, leftover_buffer)) - assert not mock_process.called - assert vad_buffer is not None - assert vad_buffer.bytes() == half_chunk + assert len(chunks) == 1 + assert ( + leftover_buffer.bytes() + == three_quarters_chunk[len(three_quarters_chunk) - (bytes_per_chunk // 4) :] + ) + assert chunks[0] == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - # Fill and wrap with 1/4 chunk left over - three_quarters_chunk = bytes( - it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) - ) - segmenter.process_with_vad(three_quarters_chunk, vad, vad_buffer) + # Run 2 chunks through + leftover_buffer.clear() + assert len(leftover_buffer) == 0 - assert mock_process.call_count == 1 - assert ( - vad_buffer.bytes() - == three_quarters_chunk[ - len(three_quarters_chunk) - (bytes_per_chunk // 4) : - ] - ) - assert ( - mock_process.call_args[0][0] - == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - ) + two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) + chunks = list(chunk_samples(two_chunks, bytes_per_chunk, leftover_buffer)) - # Run 2 chunks through - segmenter.reset() - vad_buffer.clear() - assert len(vad_buffer) == 0 - - mock_process.reset_mock() - two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) - segmenter.process_with_vad(two_chunks, vad, vad_buffer) - - assert mock_process.call_count == 2 - assert len(vad_buffer) == 0 - assert mock_process.call_args_list[0][0][0] == two_chunks[:bytes_per_chunk] - assert mock_process.call_args_list[1][0][0] == two_chunks[bytes_per_chunk:] + assert len(chunks) == 2 + assert len(leftover_buffer) == 0 + assert chunks[0] == two_chunks[:bytes_per_chunk] + assert chunks[1] == two_chunks[bytes_per_chunk:] def test_partial_chunk() -> None: @@ -125,43 +105,3 @@ def test_chunk_samples_leftover() -> None: assert len(chunks) == 1 assert leftover_chunk_buffer.bytes() == bytes([5, 6]) - - -def test_vad_no_chunking() -> None: - """Test VAD that doesn't require chunking.""" - - class VadNoChunk(VoiceActivityDetector): - def is_speech(self, chunk: bytes) -> bool: - return sum(chunk) > 0 - - @property - def samples_per_chunk(self) -> int | None: - return None - - vad = VadNoChunk() - segmenter = VoiceCommandSegmenter( - speech_seconds=1.0, silence_seconds=1.0, reset_seconds=0.5 - ) - silence = bytes([0] * 16000) - speech = bytes([255] * (16000 // 2)) - - # Test with differently-sized chunks - assert vad.is_speech(speech) - assert not vad.is_speech(silence) - - # Simulate voice command - assert segmenter.process_with_vad(silence, vad, None) - # begin - assert 
segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # reset with silence - assert segmenter.process_with_vad(silence, vad, None) - # resume - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # end - assert segmenter.process_with_vad(silence, vad, None) - assert not segmenter.process_with_vad(silence, vad, None) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index e08dd9685ea..2da914f4252 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -5,9 +5,15 @@ import base64 from typing import Any from unittest.mock import ANY, patch +import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.assist_pipeline.const import DOMAIN +from homeassistant.components.assist_pipeline.const import ( + DOMAIN, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, @@ -17,17 +23,31 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr -from .conftest import MockWakeWordEntity, MockWakeWordEntity2 +from .conftest import ( + BYTES_ONE_SECOND, + BYTES_PER_CHUNK, + MockWakeWordEntity, + MockWakeWordEntity2, + make_10ms_chunk, +) from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator +@pytest.mark.parametrize( + "extra_msg", + [ + {}, + {"pipeline": "conversation.home_assistant"}, + ], +) async def test_text_only_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, snapshot: SnapshotAssertion, + extra_msg: dict[str, Any], ) -> None: """Test events from a pipeline run with text input (no STT/TTS).""" events = [] @@ -42,6 +62,7 @@ async def test_text_only_pipeline( "conversation_id": "mock-conversation-id", "device_id": "mock-device-id", } + | extra_msg ) # result @@ -195,7 +216,7 @@ async def test_audio_pipeline_with_wake_word_timeout( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "timeout": 1, }, } @@ -219,7 +240,7 @@ async def test_audio_pipeline_with_wake_word_timeout( events.append(msg["event"]) # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(16000 * 2 * 2)) + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) # Time out error msg = await client.receive_json() @@ -249,12 +270,7 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "timeout": 0, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, } ) @@ -277,9 +293,10 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events.append(msg["event"]) # "audio" - await client.send_bytes(bytes([handler_id]) + b"wake word") + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - msg = await client.receive_json() + async with asyncio.timeout(1): + msg = await client.receive_json() assert msg["event"]["type"] == "wake_word-end" assert msg["event"]["data"] == 
snapshot events.append(msg["event"]) @@ -360,7 +377,7 @@ async def test_audio_pipeline_no_wake_word_engine( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -397,7 +414,7 @@ async def test_audio_pipeline_no_wake_word_entity( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -1180,6 +1197,31 @@ async def test_get_pipeline( "wake_word_id": None, } + # Get conversation agent as pipeline + await client.send_json_auto_id( + { + "type": "assist_pipeline/pipeline/get", + "pipeline_id": "conversation.home_assistant", + } + ) + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "conversation_engine": "conversation.home_assistant", + "conversation_language": "en", + "id": ANY, + "language": "en", + "name": "Home Assistant", + # It found these defaults + "stt_engine": "test", + "stt_language": "en-US", + "tts_engine": "test", + "tts_language": "en-US", + "tts_voice": "james_earl_jones", + "wake_word_entity": None, + "wake_word_id": None, + } + await client.send_json_auto_id( { "type": "assist_pipeline/pipeline/get", @@ -1741,7 +1783,7 @@ async def test_audio_pipeline_with_enhancements( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, # Enhancements "noise_suppression_level": 2, "auto_gain_dbfs": 15, @@ -1771,7 +1813,7 @@ async def test_audio_pipeline_with_enhancements( # One second of silence. # This will pass through the audio enhancement pipeline, but we don't test # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(16000 * 2)) + await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) # End of audio stream (handler id + empty payload) await client.send_bytes(bytes([handler_id])) @@ -1841,11 +1883,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1854,11 +1892,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1892,8 +1926,8 @@ async def test_wake_word_cooldown_same_id( assert msg["event"]["data"] == snapshot # Wake both up at the same time - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -1932,11 +1966,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1945,11 +1975,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1983,8 
+2009,8 @@ async def test_wake_word_cooldown_different_ids( assert msg["event"]["data"] == snapshot # Wake both up at the same time, but they will have different wake word ids - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events msg = await client_1.receive_json() @@ -2059,11 +2085,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2074,11 +2096,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2113,8 +2131,8 @@ async def test_wake_word_cooldown_different_entities( # Wake both up at the same time. # They will have the same wake word id, but different entities. - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -2152,7 +2170,11 @@ async def test_device_capture( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start capture client_capture = await hass_ws_client(hass) @@ -2175,11 +2197,7 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2230,9 +2248,9 @@ async def test_device_capture( # Verify audio chunks for i, audio_chunk in enumerate(audio_chunks): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2257,7 +2275,11 @@ async def test_device_capture_override( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start first capture client_capture_1 = await hass_ws_client(hass) @@ -2280,11 +2302,7 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2367,9 +2385,9 @@ async def test_device_capture_override( # Verify all but first audio chunk for i, audio_chunk 
in enumerate(audio_chunks[1:]): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2429,11 +2447,7 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2454,8 +2468,8 @@ async def test_device_capture_queue_full( assert msg["event"]["type"] == "stt-start" assert msg["event"]["data"] == snapshot - # Single sample will "overflow" the queue - await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) + # Single chunk will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id]) + bytes(BYTES_PER_CHUNK)) # End of audio stream await client_pipeline.send_bytes(bytes([handler_id])) @@ -2563,7 +2577,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2575,7 +2589,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2634,7 +2648,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2646,7 +2660,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "hey_jarvis", }, } diff --git a/tests/components/asterisk_mbox/test_init.py b/tests/components/asterisk_mbox/test_init.py index 4800ada0ec4..d7567ea3286 100644 --- a/tests/components/asterisk_mbox/test_init.py +++ b/tests/components/asterisk_mbox/test_init.py @@ -1,9 +1,9 @@ """Test mailbox.""" +from collections.abc import Generator from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.asterisk_mbox import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/atag/conftest.py b/tests/components/atag/conftest.py index 83ba3e37aad..63476c4846d 100644 --- a/tests/components/atag/conftest.py +++ b/tests/components/atag/conftest.py @@ -1,10 +1,10 @@ """Provide common Atag fixtures.""" import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index 62c01d38d0c..30be50e75c9 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -58,6 +58,10 @@ def _mock_authenticator(auth_state): return authenticator +def _timetoken(): + return str(time.time_ns())[:-2] + + @patch("yalexs.manager.gateway.ApiAsync") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_authenticate") async def _mock_setup_august( diff --git a/tests/components/august/test_binary_sensor.py b/tests/components/august/test_binary_sensor.py index 377a5bf2897..33d582de8d8 
100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,7 +1,6 @@ """The binary_sensor tests for the august platform.""" import datetime -import time from unittest.mock import Mock, patch from yalexs.pubnub_async import AugustPubNub @@ -25,15 +24,12 @@ from .mocks import ( _mock_doorbell_from_fixture, _mock_doorsense_enabled_august_lock_detail, _mock_lock_from_fixture, + _timetoken, ) from tests.common import async_fire_time_changed -def _timetoken(): - return str(time.time_ns())[:-2] - - async def test_doorsense(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_lock_from_fixture( @@ -98,7 +94,7 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF binary_sensor_k98gidt45gul_name_motion = hass.states.get( @@ -125,7 +121,7 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None: ) assert binary_sensor_tmt100_name_online.state == STATE_OFF binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_occupancy" + "binary_sensor.tmt100_name_doorbell_ding" ) assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE @@ -147,13 +143,13 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", return_value=native_time, ): async_fire_time_changed(hass, new_time) @@ -177,7 +173,7 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: ) assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF @@ -245,14 +241,14 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", return_value=native_time, ): async_fire_time_changed(hass, new_time) @@ -276,20 +272,20 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + 
"binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) with patch( - "homeassistant.components.august.binary_sensor._native_datetime", + "homeassistant.components.august.util._native_datetime", return_value=native_time, ): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_occupancy" + "binary_sensor.k98gidt45gul_name_doorbell_ding" ) assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF @@ -407,6 +403,6 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: await _create_august_with_devices(hass, [lock_one]) ding_sensor = hass.states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_occupancy" + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" ) assert ding_sensor.state == STATE_OFF diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py new file mode 100644 index 00000000000..61b7560f462 --- /dev/null +++ b/tests/components/august/test_event.py @@ -0,0 +1,182 @@ +"""The event tests for the august.""" + +import datetime +from unittest.mock import Mock, patch + +from yalexs.pubnub_async import AugustPubNub + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_august_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, + _timetoken, +) + +from tests.common import async_fire_time_changed + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_august_with_devices(hass, [doorbell_one]) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_august_with_devices(hass, [doorbell_one]) + motion_state = hass.states.get("event.tmt100_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNAVAILABLE + doorbell_state = hass.states.get("event.tmt100_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_august_with_devices(hass, [doorbell_one], activities=activities) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not 
None + assert doorbell_state.state == STATE_UNKNOWN + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state.state == isotime + + +async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: + """Test creation of a doorbell that can be updated via pubnub.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + pubnub = AugustPubNub() + + await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + pubnub.message( + pubnub, + Mock( + channel=doorbell_one.pubsub_channel, + timetoken=_timetoken(), + message={ + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ), + ) + + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + + pubnub.message( + pubnub, + Mock( + channel=doorbell_one.pubsub_channel, + timetoken=_timetoken(), + message={ + "status": "buttonpush", + }, + ), + ) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.august.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + assert motion_state.state == isotime + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + 
lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_august_with_devices(hass, [lock_one]) + + doorbell_state = hass.states.get( + "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" + ) + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/aurora/conftest.py b/tests/components/aurora/conftest.py index 916f0925c4a..462203193f2 100644 --- a/tests/components/aurora/conftest.py +++ b/tests/components/aurora/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Aurora tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.aurora.const import CONF_THRESHOLD, DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE diff --git a/tests/components/autarco/__init__.py b/tests/components/autarco/__init__.py new file mode 100644 index 00000000000..208e5999fc7 --- /dev/null +++ b/tests/components/autarco/__init__.py @@ -0,0 +1,12 @@ +"""Tests for the Autarco integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/autarco/conftest.py b/tests/components/autarco/conftest.py new file mode 100644 index 00000000000..c7a95d7aa23 --- /dev/null +++ b/tests/components/autarco/conftest.py @@ -0,0 +1,82 @@ +"""Common fixtures for the Autarco tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from autarco import AccountSite, Inverter, Solar +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.autarco.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_autarco_client() -> Generator[AsyncMock]: + """Mock a Autarco client.""" + with ( + patch( + "homeassistant.components.autarco.Autarco", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.autarco.config_flow.Autarco", + new=mock_client, + ), + ): + client = mock_client.return_value + client.get_account.return_value = [ + AccountSite( + site_id=1, + public_key="key-public", + system_name="test-system", + retailer="test-retailer", + health="OK", + ) + ] + client.get_solar.return_value = Solar( + power_production=200, + energy_production_today=4, + energy_production_month=58, + energy_production_total=10379, + ) + client.get_inverters.return_value = { + "test-serial-1": Inverter( + serial_number="test-serial-1", + out_ac_power=200, + out_ac_energy_total=10379, + grid_turned_off=False, + health="OK", + ), + "test-serial-2": Inverter( + serial_number="test-serial-2", + out_ac_power=500, + out_ac_energy_total=10379, + grid_turned_off=False, + health="OK", + ), + } + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Autarco", + data={ + CONF_EMAIL: "test@autarco.com", + CONF_PASSWORD: "test-password", + }, + ) 
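The test modules that drive the snapshots below (e.g. an autarco test_sensor.py) are not included in this hunk, only their conftest and .ambr files. As a rough sketch of how the fixtures above would typically be exercised, modeled on the apsystems switch test earlier in this diff: the test name test_all_sensors matches the snapshot entries that follow, while the homeassistant.components.autarco.PLATFORMS patch target and the module layout are assumptions, not part of this change.

"""Sketch: snapshot test for the Autarco sensor platform (assumed module layout)."""

from unittest.mock import AsyncMock, patch

from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import setup_integration

from tests.common import MockConfigEntry, snapshot_platform


async def test_all_sensors(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    mock_autarco_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
    entity_registry: er.EntityRegistry,
) -> None:
    """Snapshot every sensor entity created from the mocked Autarco API data."""
    # Limit setup to the sensor platform so only sensor entities are snapshotted;
    # the exact PLATFORMS patch target is an assumption for this sketch.
    with patch("homeassistant.components.autarco.PLATFORMS", [Platform.SENSOR]):
        await setup_integration(hass, mock_config_entry)
        await snapshot_platform(
            hass, entity_registry, snapshot, mock_config_entry.entry_id
        )

snapshot_platform then records one registry-entry and one state snapshot per entity, which is exactly the -entry / -state pairing seen in the .ambr files in this diff.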
diff --git a/tests/components/autarco/snapshots/test_diagnostics.ambr b/tests/components/autarco/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..53d9f96fb86 --- /dev/null +++ b/tests/components/autarco/snapshots/test_diagnostics.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'sites_data': list([ + dict({ + 'health': 'OK', + 'id': 1, + 'inverters': list([ + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 200, + 'serial_number': 'test-serial-1', + }), + dict({ + 'grid_turned_off': False, + 'health': 'OK', + 'out_ac_energy_total': 10379, + 'out_ac_power': 500, + 'serial_number': 'test-serial-2', + }), + ]), + 'name': 'test-system', + 'solar': dict({ + 'energy_production_month': 58, + 'energy_production_today': 4, + 'energy_production_total': 10379, + 'power_production': 200, + }), + }), + ]), + }) +# --- diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..2ff0236a59f --- /dev/null +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -0,0 +1,805 @@ +# serializer version: 1 +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-1_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-1 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-1_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_1_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 
'friendly_name': 'Inverter test-serial-1 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-2_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-2 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-2_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.inverter_test_serial_2_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-2 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production month', + 'platform': 'autarco', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'energy_production_month', + 'unique_id': '1_solar_energy_production_month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '58', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_today', + 'unique_id': '1_solar_energy_production_today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production today', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_total', + 'unique_id': '1_solar_energy_production_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_energy_production_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Power production', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_production', + 'unique_id': '1_solar_power_production', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensors[sensor.solar_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Solar Power production', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-1_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-1 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-1_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-1 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy AC output total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_energy_total', + 'unique_id': 'test-serial-2_out_ac_energy_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter test-serial-2 Energy AC output total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power AC output', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'out_ac_power', + 'unique_id': 'test-serial-2_out_ac_power', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter test-serial-2 Power AC output', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '500', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production month', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_month', + 'unique_id': '1_solar_energy_production_month', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production month', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_month', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '58', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production today', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_today', + 'unique_id': '1_solar_energy_production_today', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production today', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_energy_production_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy production total', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_production_total', + 'unique_id': '1_solar_energy_production_total', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_energy_production_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Solar Energy production total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_energy_production_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10379', + }) +# --- +# name: test_solar_sensors[sensor.solar_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.solar_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power production', + 'platform': 'autarco', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_production', + 'unique_id': '1_solar_power_production', + 'unit_of_measurement': , + }) +# --- +# name: test_solar_sensors[sensor.solar_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Solar Power production', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.solar_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '200', + }) +# --- diff --git a/tests/components/autarco/test_config_flow.py b/tests/components/autarco/test_config_flow.py new file mode 100644 index 00000000000..621ad7f55c8 --- /dev/null +++ b/tests/components/autarco/test_config_flow.py @@ -0,0 +1,101 @@ +"""Test the Autarco config flow.""" + +from unittest.mock import AsyncMock + +from autarco import AutarcoAuthenticationError, AutarcoConnectionError +import pytest + +from homeassistant.components.autarco.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_user_flow( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert result.get("step_id") == "user" + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == "test@autarco.com" + assert result.get("data") == { + CONF_EMAIL: "test@autarco.com", + CONF_PASSWORD: "test-password", + } + assert len(mock_autarco_client.get_account.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_autarco_client: AsyncMock, +) -> None: + """Test abort when setting up duplicate entry.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result.get("type") is FlowResultType.FORM + assert not result.get("errors") + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + + assert result.get("type") is FlowResultType.ABORT + assert result.get("reason") == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (AutarcoConnectionError, "cannot_connect"), + (AutarcoAuthenticationError, "invalid_auth"), + ], +) +async def test_exceptions( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test exceptions.""" + mock_autarco_client.get_account.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is FlowResultType.FORM + assert result.get("errors") == {"base": error} + + mock_autarco_client.get_account.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_EMAIL: "test@autarco.com", CONF_PASSWORD: "test-password"}, + ) + assert result.get("type") is 
FlowResultType.CREATE_ENTRY diff --git a/tests/components/autarco/test_diagnostics.py b/tests/components/autarco/test_diagnostics.py new file mode 100644 index 00000000000..1d12a2c1894 --- /dev/null +++ b/tests/components/autarco/test_diagnostics.py @@ -0,0 +1,30 @@ +"""Test Autarco diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot diff --git a/tests/components/autarco/test_init.py b/tests/components/autarco/test_init.py new file mode 100644 index 00000000000..81c5f947251 --- /dev/null +++ b/tests/components/autarco/test_init.py @@ -0,0 +1,28 @@ +"""Test the Autarco init module.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_autarco_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/autarco/test_sensor.py b/tests/components/autarco/test_sensor.py new file mode 100644 index 00000000000..e5e823501b9 --- /dev/null +++ b/tests/components/autarco/test_sensor.py @@ -0,0 +1,27 @@ +"""Test the sensor provided by the Autarco integration.""" + +from unittest.mock import MagicMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_sensors( + hass: HomeAssistant, + mock_autarco_client: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the Autarco sensors.""" + with patch("homeassistant.components.autarco.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/auth/test_init.py b/tests/components/auth/test_init.py index d0ca4699e0e..0f4908c2fc0 100644 --- a/tests/components/auth/test_init.py +++ b/tests/components/auth/test_init.py @@ -598,8 +598,8 @@ async def test_ws_delete_all_refresh_tokens( hass_admin_credential: Credentials, hass_ws_client: WebSocketGenerator, hass_access_token: str, - delete_token_type: dict[str:str], - delete_current_token: dict[str:bool], + delete_token_type: dict[str, str], + delete_current_token: dict[str, bool], expected_remaining_normal_tokens: int, expected_remaining_long_lived_tokens: int, ) -> None: diff --git a/tests/components/automation/test_blueprint.py b/tests/components/automation/test_blueprint.py index ee3fa631d00..2c92d7a5242 100644 --- a/tests/components/automation/test_blueprint.py +++ b/tests/components/automation/test_blueprint.py @@ -1,8 +1,10 @@ """Test built-in blueprints.""" import asyncio +from collections.abc import Iterator import contextlib from datetime import timedelta +from os import PathLike import pathlib from typing import Any from unittest.mock import patch @@ -23,7 +25,9 @@ BUILTIN_BLUEPRINT_FOLDER = pathlib.Path(automation.__file__).parent / "blueprint @contextlib.contextmanager -def patch_blueprint(blueprint_path: str, data_path): +def patch_blueprint( + blueprint_path: str, data_path: str | PathLike[str] +) -> Iterator[None]: """Patch blueprint loading from a different source.""" orig_load = models.DomainBlueprints._load_blueprint diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index 0c300540644..d8f04f10458 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -88,7 +88,7 @@ async def test_service_data_not_a_dict( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } }, ) @@ -111,7 +111,7 @@ async def test_service_data_single_template( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": "{{ { 'foo': 'bar' } }}", }, } @@ -136,7 +136,7 @@ async def test_service_specify_data( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": { "some": ( "{{ trigger.platform }} - {{ trigger.event.event_type }}" @@ -170,7 +170,7 @@ async def test_service_specify_entity_id( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -178,7 +178,7 @@ async def test_service_specify_entity_id( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - 
assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] async def test_service_specify_entity_id_list( @@ -192,7 +192,7 @@ async def test_service_specify_entity_id_list( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -202,7 +202,7 @@ async def test_service_specify_entity_id_list( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world", "hello.world2"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world", "hello.world2"] async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> None: @@ -216,7 +216,7 @@ async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> No {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -245,7 +245,7 @@ async def test_trigger_service_ignoring_condition( "entity_id": "non.existing", "above": "1", }, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -301,7 +301,7 @@ async def test_two_conditions_with_and( "below": 150, }, ], - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -333,7 +333,7 @@ async def test_shorthand_conditions_template( automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -360,11 +360,11 @@ async def test_automation_list_setting( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -390,8 +390,8 @@ async def test_automation_calling_two_actions( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ - {"service": "test.automation", "data": {"position": 0}}, - {"service": "test.automation", "data": {"position": 1}}, + {"action": "test.automation", "data": {"position": 0}}, + {"action": "test.automation", "data": {"position": 1}}, ], } }, @@ -420,7 +420,7 @@ async def test_shared_context(hass: HomeAssistant, calls: list[ServiceCall]) -> { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -486,7 +486,7 @@ async def test_services(hass: HomeAssistant, calls: list[ServiceCall]) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -569,7 +569,7 @@ async def test_reload_config_service( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -597,7 +597,7 @@ async def test_reload_config_service( "alias": 
"bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -650,7 +650,7 @@ async def test_reload_config_when_invalid_config( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -690,7 +690,7 @@ async def test_reload_config_handles_load_fails( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -735,7 +735,7 @@ async def test_automation_stops( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -811,7 +811,7 @@ async def test_reload_unchanged_does_not_stop( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -858,7 +858,7 @@ async def test_reload_single_unchanged_does_not_stop( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -905,7 +905,7 @@ async def test_reload_single_add_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } assert await async_setup_component(hass, automation.DOMAIN, config1) @@ -942,25 +942,25 @@ async def test_reload_single_parallel_calls( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_sun"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "moon", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_moon"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "mars", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_mars"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "venus", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_venus"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1055,7 +1055,7 @@ async def test_reload_single_remove_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } config2 = {automation.DOMAIN: {}} @@ -1093,12 +1093,12 @@ async def test_reload_moved_automation_without_alias( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "automation_with_alias", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1149,17 +1149,17 @@ async def test_reload_identical_automations_without_id( { "alias": "dolly", "trigger": {"platform": 
"event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1246,12 +1246,12 @@ async def test_reload_identical_automations_without_id( [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1278,13 +1278,13 @@ async def test_reload_identical_automations_without_id( { "id": "sun", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "id": "sun", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1424,12 +1424,12 @@ async def test_automation_restore_state(hass: HomeAssistant) -> None: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -1474,7 +1474,7 @@ async def test_initial_value_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1499,7 +1499,7 @@ async def test_initial_value_on(hass: HomeAssistant) -> None: "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -1528,7 +1528,7 @@ async def test_initial_value_off_but_restore_on(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1553,7 +1553,7 @@ async def test_initial_value_on_but_restore_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1576,7 +1576,7 @@ async def test_no_initial_value_and_restore_off(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": 
{"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1600,7 +1600,7 @@ async def test_automation_is_on_if_no_initial_state_or_restore( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1623,7 +1623,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1641,7 +1641,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] @pytest.mark.parametrize( @@ -1714,7 +1714,7 @@ async def test_automation_bad_config_validation( "alias": "good_automation", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -1756,7 +1756,7 @@ async def test_automation_bad_config_validation( "alias": "bad_automation", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -1785,7 +1785,7 @@ async def test_automation_with_error_in_script( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1811,7 +1811,7 @@ async def test_automation_with_error_in_script_2( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": None, "entity_id": "hello.world"}, + "action": {"action": None, "entity_id": "hello.world"}, } }, ) @@ -1842,19 +1842,19 @@ async def test_automation_restore_last_triggered_with_initial_state( "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -2013,11 +2013,11 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -2027,15 +2027,15 @@ async def test_extraction_functions( "type": "turn_on", }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", 
"target": {"label_id": "label-in-both"}, }, ], @@ -2087,7 +2087,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2140,7 +2140,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2150,27 +2150,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -2289,7 +2289,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2308,7 +2308,7 @@ async def test_automation_variables( "value_template": "{{ trigger.event.data.pass_condition }}", }, "action": { - "service": "test.automation", + "action": "test.automation", }, }, { @@ -2317,7 +2317,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2373,7 +2373,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2391,7 +2391,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2438,7 +2438,7 @@ async def test_automation_bad_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2465,7 +2465,7 @@ async def test_automation_this_var_always( { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "this_template": "{{this.entity_id}}", }, @@ -2542,7 +2542,7 @@ async def test_blueprint_automation( "Blueprint 'Call service based on event' generated invalid automation", ( "value should be a string for dictionary value @" - " data['action'][0]['service']" + " data['action'][0]['action']" ), ), ], @@ -2640,7 +2640,7 @@ async def test_trigger_service(hass: HomeAssistant, calls: list[ServiceCall]) -> "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } @@ -2679,14 +2679,14 @@ async def test_trigger_condition_implicit_id( { "conditions": {"condition": "trigger", "id": [0, "2"]}, "sequence": { - "service": "test.automation", + "action": 
"test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "1"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2730,14 +2730,14 @@ async def test_trigger_condition_explicit_id( { "conditions": {"condition": "trigger", "id": "one"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "two"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2822,8 +2822,8 @@ async def test_recursive_automation_starting_script( f" {automation_runs} }}}}" ) }, - {"service": "script.script1"}, - {"service": "test.script_done"}, + {"action": "script.script1"}, + {"action": "test.script_done"}, ], }, } @@ -2840,9 +2840,9 @@ async def test_recursive_automation_starting_script( {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ - {"service": "test.automation_started"}, + {"action": "test.automation_started"}, {"delay": 0.001}, - {"service": "script.script1"}, + {"action": "script.script1"}, ], } }, @@ -2923,7 +2923,7 @@ async def test_recursive_automation( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, + {"action": "test.automation_done"}, ], } }, @@ -2985,7 +2985,7 @@ async def test_recursive_automation_restart_mode( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, + {"action": "test.automation_done"}, ], } }, @@ -3021,7 +3021,7 @@ async def test_websocket_config( config = { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } assert await async_setup_component( hass, automation.DOMAIN, {automation.DOMAIN: config} @@ -3095,7 +3095,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non "from": "on", }, "action": { - "service": "automation.turn_off", + "action": "automation.turn_off", "target": { "entity_id": "automation.automation_1", }, @@ -3118,7 +3118,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, }, "action": { - "service": "persistent_notification.create", + "action": "persistent_notification.create", "metadata": {}, "data": { "message": "Test race", @@ -3185,7 +3185,7 @@ async def test_two_automations_call_restart_script_same_time( "fire_toggle": { "sequence": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test_1"}, } ] @@ -3206,7 +3206,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_0", "mode": "single", @@ -3218,7 +3218,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_1", "mode": "single", @@ -3229,6 +3229,7 @@ async def test_two_automations_call_restart_script_same_time( hass.states.async_set("binary_sensor.presence", "on") await hass.async_block_till_done() + await hass.async_block_till_done() assert len(events) == 2 cancel() @@ -3300,3 +3301,29 @@ async def test_two_automation_call_restart_script_right_after_each_other( hass.states.async_set("input_boolean.test_2", "on") await 
hass.async_block_till_done() assert len(events) == 1 + + +async def test_action_service_backward_compatibility( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test we can still use the service call method.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { + "service": "test.automation", + "entity_id": "hello.world", + "data": {"event": "{{ trigger.event.event_type }}"}, + }, + } + }, + ) + + hass.bus.async_fire("test_event") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert calls[0].data.get("event") == "test_event" diff --git a/tests/components/automation/test_recorder.py b/tests/components/automation/test_recorder.py index fc45e6aee5b..be354abe9d2 100644 --- a/tests/components/automation/test_recorder.py +++ b/tests/components/automation/test_recorder.py @@ -40,7 +40,7 @@ async def test_exclude_attributes( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -48,7 +48,7 @@ async def test_exclude_attributes( hass.bus.async_fire("test_event") await hass.async_block_till_done() assert len(calls) == 1 - assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID) + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] await async_wait_recording_done(hass) states = await hass.async_add_executor_job( diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index b306e25c434..c3377c15955 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -2,19 +2,17 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Coroutine, Generator from copy import deepcopy from types import MappingProxyType -from typing import Any +from typing import Any, Protocol from unittest.mock import AsyncMock, patch from axis.rtsp import Signal, State import pytest import respx -from typing_extensions import Generator from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_MODEL, @@ -48,9 +46,33 @@ from .const import ( from tests.common import MockConfigEntry +type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] +type RtspStateType = Callable[[bool], None] -@pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: + +class RtspEventMock(Protocol): + """Fixture to allow mocking received RTSP events.""" + + def __call__( + self, + topic: str, + data_type: str, + data_value: str, + operation: str = "Initialized", + source_name: str = "", + source_idx: str = "", + ) -> None: + """Send RTSP event.""" + + +class _RtspClientMock(Protocol): + async def __call__( + self, data: dict[str, Any] | None = None, state: str = "" + ) -> None: ... 
+ + +@pytest.fixture(name="mock_setup_entry") +def fixture_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.axis.async_setup_entry", return_value=True @@ -62,14 +84,13 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def config_entry_fixture( - hass: HomeAssistant, +def fixture_config_entry( config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], config_entry_version: int, -) -> ConfigEntry: +) -> MockConfigEntry: """Define a config entry fixture.""" - config_entry = MockConfigEntry( + return MockConfigEntry( domain=AXIS_DOMAIN, entry_id="676abe5b73621446e6550a2e86ffe3dd", unique_id=FORMATTED_MAC, @@ -77,18 +98,16 @@ def config_entry_fixture( options=config_entry_options, version=config_entry_version, ) - config_entry.add_to_hass(hass) - return config_entry @pytest.fixture(name="config_entry_version") -def config_entry_version_fixture() -> int: +def fixture_config_entry_version() -> int: """Define a config entry version fixture.""" return 3 @pytest.fixture(name="config_entry_data") -def config_entry_data_fixture() -> MappingProxyType[str, Any]: +def fixture_config_entry_data() -> MappingProxyType[str, Any]: """Define a config entry data fixture.""" return { CONF_HOST: DEFAULT_HOST, @@ -101,7 +120,7 @@ def config_entry_data_fixture() -> MappingProxyType[str, Any]: @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture() -> MappingProxyType[str, Any]: +def fixture_config_entry_options() -> MappingProxyType[str, Any]: """Define a config entry options fixture.""" return {} @@ -109,8 +128,15 @@ def config_entry_options_fixture() -> MappingProxyType[str, Any]: # Axis API fixtures -@pytest.fixture(name="mock_vapix_requests") -def default_request_fixture( +@pytest.fixture(autouse=True) +def reset_mock_requests() -> Generator[None]: + """Reset respx mock routes after the test.""" + yield + respx.mock.clear() + + +@pytest.fixture(name="mock_requests") +def fixture_request( respx_mock: respx.MockRouter, port_management_payload: dict[str, Any], param_properties_payload: str, @@ -215,7 +241,7 @@ def api_discovery_items() -> dict[str, Any]: @pytest.fixture(autouse=True) -def api_discovery_fixture(api_discovery_items: dict[str, Any]) -> None: +def fixture_api_discovery(api_discovery_items: dict[str, Any]) -> None: """Apidiscovery mock response.""" data = deepcopy(API_DISCOVERY_RESPONSE) if api_discovery_items: @@ -224,64 +250,66 @@ def api_discovery_fixture(api_discovery_items: dict[str, Any]) -> None: @pytest.fixture(name="port_management_payload") -def io_port_management_data_fixture() -> dict[str, Any]: +def fixture_io_port_management_data() -> dict[str, Any]: """Property parameter data.""" return PORT_MANAGEMENT_RESPONSE @pytest.fixture(name="param_properties_payload") -def param_properties_data_fixture() -> str: +def fixture_param_properties_data() -> str: """Property parameter data.""" return PROPERTIES_RESPONSE @pytest.fixture(name="param_ports_payload") -def param_ports_data_fixture() -> str: +def fixture_param_ports_data() -> str: """Property parameter data.""" return PORTS_RESPONSE @pytest.fixture(name="mqtt_status_code") -def mqtt_status_code_fixture() -> int: +def fixture_mqtt_status_code() -> int: """Property parameter data.""" return 200 -@pytest.fixture(name="setup_default_vapix_requests") -def default_vapix_requests_fixture(mock_vapix_requests: Callable[[str], None]) -> None: +@pytest.fixture(name="mock_default_requests") 
+def fixture_default_requests(mock_requests: Callable[[str], None]) -> None: """Mock default Vapix requests responses.""" - mock_vapix_requests(DEFAULT_HOST) + mock_requests(DEFAULT_HOST) -@pytest.fixture(name="prepare_config_entry") -async def prep_config_entry_fixture( - hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: None -) -> Callable[[], ConfigEntry]: +@pytest.fixture(name="config_entry_factory") +async def fixture_config_entry_factory( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_requests: Callable[[str], None], +) -> ConfigEntryFactoryType: """Fixture factory to set up Axis network device.""" - async def __mock_setup_config_entry() -> ConfigEntry: - assert await hass.config_entries.async_setup(config_entry.entry_id) + async def __mock_setup_config_entry() -> MockConfigEntry: + config_entry.add_to_hass(hass) + mock_requests(config_entry.data[CONF_HOST]) + await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() return config_entry return __mock_setup_config_entry -@pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture( - hass: HomeAssistant, config_entry: ConfigEntry, setup_default_vapix_requests: None -) -> ConfigEntry: +@pytest.fixture(name="config_entry_setup") +async def fixture_config_entry_setup( + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: """Define a fixture to set up Axis network device.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - return config_entry + return await config_entry_factory() # RTSP fixtures -@pytest.fixture(autouse=True) -def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: +@pytest.fixture(autouse=True, name="_mock_rtsp_client") +def fixture_axis_rtsp_client() -> Generator[_RtspClientMock]: """No real RTSP communication allowed.""" with patch("axis.stream_manager.RTSPClient") as rtsp_client_mock: rtsp_client_mock.return_value.session.state = State.STOPPED @@ -298,7 +326,7 @@ def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: rtsp_client_mock.return_value.stop = stop_stream - def make_rtsp_call(data: dict | None = None, state: str = "") -> None: + def make_rtsp_call(data: dict[str, Any] | None = None, state: str = "") -> None: """Generate a RTSP call.""" axis_streammanager_session_callback = rtsp_client_mock.call_args[0][4] @@ -313,10 +341,8 @@ def mock_axis_rtspclient() -> Generator[Callable[[dict | None, str], None]]: yield make_rtsp_call -@pytest.fixture(autouse=True) -def mock_rtsp_event( - mock_axis_rtspclient: Callable[[dict | None, str], None], -) -> Callable[[str, str, str, str, str, str], None]: +@pytest.fixture(autouse=True, name="mock_rtsp_event") +def fixture_rtsp_event(_mock_rtsp_client: _RtspClientMock) -> RtspEventMock: """Fixture to allow mocking received RTSP events.""" def send_event( @@ -361,20 +387,18 @@ def mock_rtsp_event( """ - mock_axis_rtspclient(data=event.encode("utf-8")) + _mock_rtsp_client(data=event.encode("utf-8")) return send_event -@pytest.fixture(autouse=True) -def mock_rtsp_signal_state( - mock_axis_rtspclient: Callable[[dict | None, str], None], -) -> Callable[[bool], None]: +@pytest.fixture(autouse=True, name="mock_rtsp_signal_state") +def fixture_rtsp_signal_state(_mock_rtsp_client: _RtspClientMock) -> RtspStateType: """Fixture to allow mocking RTSP state signalling.""" def send_signal(connected: bool) -> None: """Signal state change of RTSP connection.""" signal = 
Signal.PLAYING if connected else Signal.FAILED - mock_axis_rtspclient(state=signal) + _mock_rtsp_client(state=signal) return send_signal diff --git a/tests/components/axis/const.py b/tests/components/axis/const.py index 16b9d17f99e..2efb464efd7 100644 --- a/tests/components/axis/const.py +++ b/tests/components/axis/const.py @@ -4,8 +4,8 @@ from axis.models.api import CONTEXT MAC = "00408C123456" FORMATTED_MAC = "00:40:8c:12:34:56" -MODEL = "model" -NAME = "name" +MODEL = "A1234" +NAME = "home" DEFAULT_HOST = "1.2.3.4" diff --git a/tests/components/axis/snapshots/test_binary_sensor.ambr b/tests/components/axis/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..94b1cc2fc2e --- /dev/null +++ b/tests/components/axis/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1343 @@ +# serializer version: 1 +# name: test_binary_sensors[event0-binary_sensor.name_daynight_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'light', + 'friendly_name': 'name DayNight 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_daynight_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event0-daynight_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'light', + 'friendly_name': 'name DayNight 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_daynight_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_daynight_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DayNight 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'light', + 'friendly_name': 'home DayNight 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_daynight_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event0][binary_sensor.home_daynight_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_daynight_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DayNight 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensors[event0][binary_sensor.home_daynight_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'light', + 'friendly_name': 'home DayNight 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_daynight_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event1-binary_sensor.name_sound_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'sound', + 'friendly_name': 'name Sound 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_sound_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event1-sound_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'sound', + 'friendly_name': 'name Sound 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_sound_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_sound_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sound 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'sound', + 'friendly_name': 'home Sound 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_sound_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event10-binary_sensor.name_object_analytics_device1scenario8] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Object Analytics Device1Scenario8', + }), + 'context': , + 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event10-object_analytics_device1scenario8] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Object Analytics Device1Scenario8', + }), + 'context': , + 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Device1Scenario8', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Device1Scenario8', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Device1Scenario8', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Device1Scenario8', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event1][binary_sensor.home_sound_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_sound_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sound 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event1][binary_sensor.home_sound_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'sound', + 'friendly_name': 'home Sound 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_sound_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event2-binary_sensor.name_pir_sensor] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'name PIR sensor', + }), + 'context': , + 'entity_id': 
'binary_sensor.name_pir_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event2-pir_sensor] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'name PIR sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.name_pir_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR sensor', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'home PIR sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR sensor', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'home PIR sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event3-binary_sensor.name_pir_0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name PIR 0', + }), + 'context': , + 'entity_id': 'binary_sensor.name_pir_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event3-pir_0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name PIR 0', + }), + 'context': , + 'entity_id': 'binary_sensor.name_pir_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR 0', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home PIR 0', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event3][binary_sensor.home_pir_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_pir_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PIR 0', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event3][binary_sensor.home_pir_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home PIR 0', + }), + 'context': , + 'entity_id': 'binary_sensor.home_pir_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[event4-binary_sensor.name_fence_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Fence Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_fence_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event4-fence_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Fence Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_fence_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fence Guard Profile 1', + 'platform': 'axis', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Fence Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fence Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Fence Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_fence_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event5-binary_sensor.name_motion_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Motion Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_motion_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event5-motion_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Motion Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_motion_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# 
--- +# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Motion Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Motion Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_motion_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event6-binary_sensor.name_loitering_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Loitering Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event6-loitering_guard_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Loitering Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Loitering Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'motion', + 'friendly_name': 'home Loitering Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Loitering Guard Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Loitering Guard Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event7-binary_sensor.name_vmd4_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name VMD4 Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_vmd4_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event7-vmd4_profile_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name VMD4 Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_vmd4_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Profile 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Profile 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_profile_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event8-binary_sensor.name_object_analytics_scenario_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Object Analytics Scenario 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event8-object_analytics_scenario_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name Object Analytics Scenario 1', + }), + 'context': , + 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Scenario 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Scenario 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Object Analytics Scenario 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home Object Analytics Scenario 1', + }), + 'context': , + 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event9-binary_sensor.name_vmd4_camera1profile9] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name VMD4 Camera1Profile9', + }), + 'context': , + 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event9-vmd4_camera1profile9] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'name VMD4 Camera1Profile9', + }), + 'context': , + 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Camera1Profile9', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Camera1Profile9', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VMD4 Camera1Profile9', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'motion', + 'friendly_name': 'home VMD4 Camera1Profile9', + }), + 'context': , + 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/snapshots/test_diagnostics.ambr b/tests/components/axis/snapshots/test_diagnostics.ambr index 8ea316d00cf..3a643f55d3e 100644 --- a/tests/components/axis/snapshots/test_diagnostics.ambr +++ b/tests/components/axis/snapshots/test_diagnostics.ambr @@ -30,8 +30,8 @@ 'config': dict({ 'data': dict({ 'host': '1.2.3.4', - 'model': 'model', - 'name': 'name', + 'model': 'A1234', + 'name': 'home', 'password': '**REDACTED**', 'port': 80, 'username': '**REDACTED**', diff --git a/tests/components/axis/snapshots/test_hub.ambr b/tests/components/axis/snapshots/test_hub.ambr new file mode 100644 index 00000000000..16579287f09 --- /dev/null +++ b/tests/components/axis/snapshots/test_hub.ambr @@ -0,0 +1,73 @@ +# serializer version: 1 +# name: test_device_registry_entry[api_discovery_items0] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + tuple( + 'mac', + '00:40:8c:12:34:56', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'axis', + '00:40:8c:12:34:56', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Axis Communications AB', + 'model': 'A1234 Network Camera', + 'model_id': None, + 'name': 'home', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '00:40:8c:12:34:56', + 'suggested_area': None, + 'sw_version': '9.10.1', + 'via_device_id': None, + }) +# --- +# name: test_device_registry_entry[api_discovery_items1] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + tuple( + 'mac', + '00:40:8c:12:34:56', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'axis', + '00:40:8c:12:34:56', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Axis Communications AB', + 'model': 'A1234 Network Camera', + 'model_id': None, + 'name': 'home', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '00:40:8c:12:34:56', + 'suggested_area': None, + 'sw_version': '9.80.1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/axis/snapshots/test_light.ambr b/tests/components/axis/snapshots/test_light.ambr new file mode 100644 index 00000000000..b37da39fe27 --- /dev/null +++ b/tests/components/axis/snapshots/test_light.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_lights[api_discovery_items0][light.home_ir_light_0-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.home_ir_light_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IR Light 0', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Light/Status-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[api_discovery_items0][light.home_ir_light_0-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 170, + 'color_mode': , + 'friendly_name': 'home IR Light 0', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.home_ir_light_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/snapshots/test_switch.ambr b/tests/components/axis/snapshots/test_switch.ambr new file mode 100644 index 00000000000..dc4c75371cf --- /dev/null +++ b/tests/components/axis/snapshots/test_switch.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/test_binary_sensor.py b/tests/components/axis/test_binary_sensor.py index 99a530724e3..a1cf1e129d5 
100644 --- a/tests/components/axis/test_binary_sensor.py +++ b/tests/components/axis/test_binary_sensor.py @@ -1,22 +1,22 @@ """Axis binary sensor platform tests.""" -from collections.abc import Callable -from typing import Any +from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.binary_sensor import ( - DOMAIN as BINARY_SENSOR_DOMAIN, - BinarySensorDeviceClass, -) -from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .const import NAME +from .conftest import ConfigEntryFactoryType, RtspEventMock + +from tests.common import snapshot_platform @pytest.mark.parametrize( - ("event", "entity"), + "event", [ ( { @@ -25,13 +25,7 @@ from .const import NAME "source_idx": "1", "data_type": "DayNight", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_daynight_1", - "state": STATE_ON, - "name": f"{NAME} DayNight 1", - "device_class": BinarySensorDeviceClass.LIGHT, - }, + } ), ( { @@ -40,13 +34,7 @@ from .const import NAME "source_idx": "1", "data_type": "Sound", "data_value": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1", - "state": STATE_OFF, - "name": f"{NAME} Sound 1", - "device_class": BinarySensorDeviceClass.SOUND, - }, + } ), ( { @@ -56,13 +44,7 @@ from .const import NAME "operation": "Initialized", "source_name": "port", "source_idx": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_sensor", - "state": STATE_OFF, - "name": f"{NAME} PIR sensor", - "device_class": BinarySensorDeviceClass.CONNECTIVITY, - }, + } ), ( { @@ -71,78 +53,42 @@ from .const import NAME "data_value": "0", "source_name": "sensor", "source_idx": "0", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_pir_0", - "state": STATE_OFF, - "name": f"{NAME} PIR 0", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_fence_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Fence Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_motion_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Motion Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_loitering_guard_profile_1", - "state": STATE_ON, - "name": f"{NAME} Loitering Guard Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_profile_1", - "state": STATE_ON, - "name": f"{NAME} VMD4 Profile 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_scenario_1", - "state": STATE_ON, - "name": 
f"{NAME} Object Analytics Scenario 1", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), # Events with names generated from event ID and topic ( @@ -150,50 +96,35 @@ from .const import NAME "topic": "tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_vmd4_camera1profile9", - "state": STATE_ON, - "name": f"{NAME} VMD4 Camera1Profile9", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ( { "topic": "tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8", "data_type": "active", "data_value": "1", - }, - { - "id": f"{BINARY_SENSOR_DOMAIN}.{NAME}_object_analytics_device1scenario8", - "state": STATE_ON, - "name": f"{NAME} Object Analytics Device1Scenario8", - "device_class": BinarySensorDeviceClass.MOTION, - }, + } ), ], ) -@pytest.mark.usefixtures("setup_config_entry") async def test_binary_sensors( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, event: dict[str, str], - entity: dict[str, Any], ) -> None: """Test that sensors are loaded properly.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.BINARY_SENSOR]): + config_entry = await config_entry_factory() mock_rtsp_event(**event) - await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 1 - - state = hass.states.get(entity["id"]) - assert state.state == entity["state"] - assert state.name == entity["name"] - assert state.attributes["device_class"] == entity["device_class"] + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) @pytest.mark.parametrize( - ("event"), + "event", [ # Event with unsupported topic { @@ -225,13 +156,12 @@ async def test_binary_sensors( }, ], ) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_unsupported_events( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + mock_rtsp_event: RtspEventMock, event: dict[str, str], ) -> None: """Validate nothing breaks with unsupported events.""" mock_rtsp_event(**event) - await hass.async_block_till_done() assert len(hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN)) == 0 diff --git a/tests/components/axis/test_camera.py b/tests/components/axis/test_camera.py index 7d26cc7a3bc..00fe4391b0c 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -1,36 +1,17 @@ """Axis camera platform tests.""" -from collections.abc import Callable - import pytest from homeassistant.components import camera -from homeassistant.components.axis.const import ( - CONF_STREAM_PROFILE, - DOMAIN as AXIS_DOMAIN, -) +from homeassistant.components.axis.const import CONF_STREAM_PROFILE from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_IDLE from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .const import MAC, NAME -async def test_platform_manually_configured(hass: HomeAssistant) -> None: - """Test that nothing happens when platform is manually configured.""" - assert ( - await async_setup_component( - hass, CAMERA_DOMAIN, {CAMERA_DOMAIN: {"platform": AXIS_DOMAIN}} - ) - is True - ) - - assert 
AXIS_DOMAIN not in hass.data - - -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_camera(hass: HomeAssistant) -> None: """Test that Axis camera platform is loaded properly.""" assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 @@ -51,7 +32,7 @@ async def test_camera(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_entry_options", [{CONF_STREAM_PROFILE: "profile_1"}]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_camera_with_stream_profile(hass: HomeAssistant) -> None: """Test that Axis camera entity is using the correct path with stream profile.""" assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 @@ -82,13 +63,11 @@ root.Properties.Firmware.BuildDate=Feb 15 2019 09:42 root.Properties.Firmware.BuildNumber=26 root.Properties.Firmware.Version=9.10.1 root.Properties.System.SerialNumber={MAC} -""" +""" # No image format data to signal camera support @pytest.mark.parametrize("param_properties_payload", [PROPERTY_DATA]) -async def test_camera_disabled( - hass: HomeAssistant, prepare_config_entry: Callable[[], ConfigEntry] -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_camera_disabled(hass: HomeAssistant) -> None: """Test that Axis camera platform is loaded properly but does not create camera entity.""" - await prepare_config_entry() assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 0 diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 055c74cc9a5..5ceb6588fbd 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Callable from ipaddress import ip_address -from unittest.mock import AsyncMock, patch +from unittest.mock import patch import pytest @@ -17,13 +17,11 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, - SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF, - ConfigEntry, ) from homeassistant.const import ( CONF_HOST, @@ -45,21 +43,9 @@ from tests.common import MockConfigEntry DHCP_FORMATTED_MAC = dr.format_mac(MAC).replace(":", "") -@pytest.fixture(name="mock_config_entry") -async def mock_config_entry_fixture( - hass: HomeAssistant, config_entry: MockConfigEntry, mock_setup_entry: AsyncMock -) -> MockConfigEntry: - """Mock config entry and setup entry.""" - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - return config_entry - - -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_flow_manual_configuration(hass: HomeAssistant) -> None: """Test that config flow works.""" - MockConfigEntry(domain=AXIS_DOMAIN, source=SOURCE_IGNORE).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} ) @@ -93,11 +79,11 @@ async def test_flow_manual_configuration(hass: HomeAssistant) -> None: async def test_manual_configuration_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" +
assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -106,7 +92,7 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -121,10 +107,19 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" -async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("exc", "error"), + [ + (config_flow.AuthenticationRequired, "invalid_auth"), + (config_flow.CannotConnect, "cannot_connect"), + ], +) +async def test_flow_fails_on_api( + hass: HomeAssistant, exc: Exception, error: str +) -> None: """Test that config flow fails on faulty credentials.""" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_USER} @@ -135,7 +130,7 @@ async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: with patch( "homeassistant.components.axis.config_flow.get_axis_api", - side_effect=config_flow.AuthenticationRequired, + side_effect=exc, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -148,37 +143,10 @@ async def test_flow_fails_faulty_credentials(hass: HomeAssistant) -> None: }, ) - assert result["errors"] == {"base": "invalid_auth"} + assert result["errors"] == {"base": error} -async def test_flow_fails_cannot_connect(hass: HomeAssistant) -> None: - """Test that config flow fails on cannot connect.""" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, context={"source": SOURCE_USER} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - with patch( - "homeassistant.components.axis.config_flow.get_axis_api", - side_effect=config_flow.CannotConnect, - ): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_PROTOCOL: "http", - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "user", - CONF_PASSWORD: "pass", - CONF_PORT: 80, - }, - ) - - assert result["errors"] == {"base": "cannot_connect"} - - -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_flow_create_entry_multiple_existing_entries_of_same_model( hass: HomeAssistant, ) -> None: @@ -229,24 +197,24 @@ async def test_flow_create_entry_multiple_existing_entries_of_same_model( async def test_reauth_flow_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow fails on already configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" - assert mock_config_entry.data[CONF_USERNAME] == "root" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_USERNAME] == "root" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={"source": SOURCE_REAUTH}, 
- data=mock_config_entry.data, + data=config_entry_setup.data, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -261,35 +229,35 @@ async def test_reauth_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_PROTOCOL] == "https" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" - assert mock_config_entry.data[CONF_PORT] == 443 - assert mock_config_entry.data[CONF_USERNAME] == "user2" - assert mock_config_entry.data[CONF_PASSWORD] == "pass2" + assert config_entry_setup.data[CONF_PROTOCOL] == "https" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 443 + assert config_entry_setup.data[CONF_USERNAME] == "user2" + assert config_entry_setup.data[CONF_PASSWORD] == "pass2" async def test_reconfiguration_flow_update_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test that config flow reconfiguration updates configured device.""" - assert mock_config_entry.data[CONF_HOST] == "1.2.3.4" - assert mock_config_entry.data[CONF_USERNAME] == "root" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_USERNAME] == "root" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, context={ "source": SOURCE_RECONFIGURE, - "entry_id": mock_config_entry.entry_id, + "entry_id": config_entry_setup.entry_id, }, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -301,11 +269,11 @@ async def test_reconfiguration_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_PROTOCOL] == "http" - assert mock_config_entry.data[CONF_HOST] == "2.3.4.5" - assert mock_config_entry.data[CONF_PORT] == 80 - assert mock_config_entry.data[CONF_USERNAME] == "user" - assert mock_config_entry.data[CONF_PASSWORD] == "pass" + assert config_entry_setup.data[CONF_PROTOCOL] == "http" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 80 + assert config_entry_setup.data[CONF_USERNAME] == "user" + assert config_entry_setup.data[CONF_PASSWORD] == "pass" @pytest.mark.parametrize( @@ -372,7 +340,7 @@ async def test_reconfiguration_flow_update_configuration( ), ], ) -@pytest.mark.usefixtures("setup_default_vapix_requests", "mock_setup_entry") +@pytest.mark.usefixtures("mock_default_requests") async def test_discovery_flow( hass: HomeAssistant, source: str, @@ -455,12 +423,12 @@ async def test_discovery_flow( ) async def test_discovered_device_already_configured( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, + config_entry_setup: MockConfigEntry, source: str, discovery_info: BaseServiceInfo, ) -> None: """Test that discovery doesn't setup already configured devices.""" - assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST + assert 
config_entry_setup.data[CONF_HOST] == DEFAULT_HOST result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} @@ -468,7 +436,7 @@ async def test_discovered_device_already_configured( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_HOST] == DEFAULT_HOST + assert config_entry_setup.data[CONF_HOST] == DEFAULT_HOST @pytest.mark.parametrize( @@ -513,14 +481,14 @@ async def test_discovered_device_already_configured( ) async def test_discovery_flow_updated_configuration( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], source: str, discovery_info: BaseServiceInfo, expected_port: int, ) -> None: """Test that discovery flow update configuration with new parameters.""" - assert mock_config_entry.data == { + assert config_entry_setup.data == { CONF_HOST: DEFAULT_HOST, CONF_PORT: 80, CONF_USERNAME: "root", @@ -529,7 +497,7 @@ async def test_discovery_flow_updated_configuration( CONF_NAME: NAME, } - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") result = await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=discovery_info, context={"source": source} ) @@ -537,7 +505,7 @@ async def test_discovery_flow_updated_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert mock_config_entry.data == { + assert config_entry_setup.data == { CONF_HOST: "2.3.4.5", CONF_PORT: expected_port, CONF_USERNAME: "root", @@ -646,13 +614,13 @@ async def test_discovery_flow_ignore_link_local_address( async def test_option_flow( - hass: HomeAssistant, setup_config_entry: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test config flow options.""" - assert CONF_STREAM_PROFILE not in setup_config_entry.options - assert CONF_VIDEO_SOURCE not in setup_config_entry.options + assert CONF_STREAM_PROFILE not in config_entry_setup.options + assert CONF_VIDEO_SOURCE not in config_entry_setup.options - result = await hass.config_entries.options.async_init(setup_config_entry.entry_id) + result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "configure_stream" @@ -676,5 +644,5 @@ async def test_option_flow( CONF_STREAM_PROFILE: "profile_1", CONF_VIDEO_SOURCE: 1, } - assert setup_config_entry.options[CONF_STREAM_PROFILE] == "profile_1" - assert setup_config_entry.options[CONF_VIDEO_SOURCE] == 1 + assert config_entry_setup.options[CONF_STREAM_PROFILE] == "profile_1" + assert config_entry_setup.options[CONF_VIDEO_SOURCE] == 1 diff --git a/tests/components/axis/test_diagnostics.py b/tests/components/axis/test_diagnostics.py index c3e1faf4277..e96ba88c2cd 100644 --- a/tests/components/axis/test_diagnostics.py +++ b/tests/components/axis/test_diagnostics.py @@ -2,12 +2,13 @@ import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from .const import API_DISCOVERY_BASIC_DEVICE_INFO +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -16,11 +17,10 @@ from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( 
hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup_config_entry: ConfigEntry, + config_entry_setup: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, setup_config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/axis/test_hub.py b/tests/components/axis/test_hub.py index fb0a28bb262..74cdb0164cd 100644 --- a/tests/components/axis/test_hub.py +++ b/tests/components/axis/test_hub.py @@ -5,27 +5,21 @@ from ipaddress import ip_address from types import MappingProxyType from typing import Any from unittest import mock -from unittest.mock import ANY, AsyncMock, Mock, call, patch +from unittest.mock import ANY, Mock, call, patch import axis as axislib import pytest -from typing_extensions import Generator +from syrupy import SnapshotAssertion from homeassistant.components import axis, zeroconf from homeassistant.components.axis.const import DOMAIN as AXIS_DOMAIN from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_MODEL, - CONF_NAME, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, -) +from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from .conftest import RtspEventMock, RtspStateType from .const import ( API_DISCOVERY_BASIC_DEVICE_INFO, API_DISCOVERY_MQTT, @@ -34,62 +28,27 @@ from .const import ( NAME, ) -from tests.common import async_fire_mqtt_message +from tests.common import MockConfigEntry, async_fire_mqtt_message from tests.typing import MqttMockHAClient -@pytest.fixture(name="forward_entry_setups") -def hass_mock_forward_entry_setup(hass: HomeAssistant) -> Generator[AsyncMock]: - """Mock async_forward_entry_setups.""" - with patch.object( - hass.config_entries, "async_forward_entry_setups" - ) as forward_mock: - yield forward_mock - - -async def test_device_setup( - forward_entry_setups: AsyncMock, - config_entry_data: MappingProxyType[str, Any], - setup_config_entry: ConfigEntry, +@pytest.mark.parametrize( + "api_discovery_items", [({}), (API_DISCOVERY_BASIC_DEVICE_INFO)] +) +async def test_device_registry_entry( + config_entry_setup: MockConfigEntry, device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, ) -> None: """Successful setup.""" - hub = setup_config_entry.runtime_data - - assert hub.api.vapix.firmware_version == "9.10.1" - assert hub.api.vapix.product_number == "M1065-LW" - assert hub.api.vapix.product_type == "Network Camera" - assert hub.api.vapix.serial_number == "00408C123456" - - assert len(forward_entry_setups.mock_calls) == 1 - platforms = set(forward_entry_setups.mock_calls[0][1][1]) - assert platforms == {"binary_sensor", "camera", "light", "switch"} - - assert hub.config.host == config_entry_data[CONF_HOST] - assert hub.config.model == config_entry_data[CONF_MODEL] - assert hub.config.name == config_entry_data[CONF_NAME] - assert hub.unique_id == FORMATTED_MAC - device_entry = device_registry.async_get_device( - identifiers={(AXIS_DOMAIN, hub.unique_id)} + identifiers={(AXIS_DOMAIN, config_entry_setup.unique_id)} ) - - assert 
device_entry.configuration_url == hub.api.config.url - - -@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_BASIC_DEVICE_INFO]) -async def test_device_info(setup_config_entry: ConfigEntry) -> None: - """Verify other path of device information works.""" - hub = setup_config_entry.runtime_data - - assert hub.api.vapix.firmware_version == "9.80.1" - assert hub.api.vapix.product_number == "M1065-LW" - assert hub.api.vapix.product_type == "Network Camera" - assert hub.api.vapix.serial_number == "00408C123456" + assert device_entry == snapshot @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_device_support_mqtt( hass: HomeAssistant, mqtt_mock: MqttMockHAClient ) -> None: @@ -115,7 +74,7 @@ async def test_device_support_mqtt( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_MQTT]) @pytest.mark.parametrize("mqtt_status_code", [401]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> None: """Successful setup.""" mqtt_call = call(f"{MAC}/#", mock.ANY, 0, "utf-8") @@ -124,14 +83,14 @@ async def test_device_support_mqtt_low_privilege(mqtt_mock: MqttMockHAClient) -> async def test_update_address( hass: HomeAssistant, - setup_config_entry: ConfigEntry, - mock_vapix_requests: Callable[[str], None], + config_entry_setup: MockConfigEntry, + mock_requests: Callable[[str], None], ) -> None: """Test update address works.""" - hub = setup_config_entry.runtime_data + hub = config_entry_setup.runtime_data assert hub.api.config.host == "1.2.3.4" - mock_vapix_requests("2.3.4.5") + mock_requests("2.3.4.5") await hass.config_entries.flow.async_init( AXIS_DOMAIN, data=zeroconf.ZeroconfServiceInfo( @@ -150,11 +109,11 @@ async def test_update_address( assert hub.api.config.host == "2.3.4.5" -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_device_unavailable( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], - mock_rtsp_signal_state: Callable[[bool], None], + mock_rtsp_event: RtspEventMock, + mock_rtsp_signal_state: RtspStateType, ) -> None: """Successful setup.""" # Provide an entity that can be used to verify connection state on @@ -187,22 +146,12 @@ async def test_device_unavailable( assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{NAME}_sound_1").state == STATE_OFF -@pytest.mark.usefixtures("setup_default_vapix_requests") -async def test_device_not_accessible( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: - """Failed setup schedules a retry of setup.""" - with patch.object(axis, "get_axis_api", side_effect=axis.errors.CannotConnect): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert hass.data[AXIS_DOMAIN] == {} - - -@pytest.mark.usefixtures("setup_default_vapix_requests") +@pytest.mark.usefixtures("mock_default_requests") async def test_device_trigger_reauth_flow( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Failed authentication trigger a reauthentication flow.""" + config_entry.add_to_hass(hass) with ( patch.object( axis, "get_axis_api", side_effect=axis.errors.AuthenticationRequired @@ -212,18 +161,7 @@ async def test_device_trigger_reauth_flow( await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() mock_flow_init.assert_called_once() - assert hass.data[AXIS_DOMAIN] == {} - - -@pytest.mark.usefixtures("setup_default_vapix_requests") -async def test_device_unknown_error( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: - """Unknown errors are handled.""" - with patch.object(axis, "get_axis_api", side_effect=Exception): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert hass.data[AXIS_DOMAIN] == {} + assert config_entry.state == ConfigEntryState.SETUP_ERROR async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: @@ -241,36 +179,31 @@ async def test_shutdown(config_entry_data: MappingProxyType[str, Any]) -> None: assert len(axis_device.api.stream.stop.mock_calls) == 1 -async def test_get_device_fails( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] +@pytest.mark.parametrize( + ("side_effect", "state"), + [ + # Device unauthorized yields authentication required error + (axislib.Unauthorized, ConfigEntryState.SETUP_ERROR), + # Device unavailable yields cannot connect error + (TimeoutError, ConfigEntryState.SETUP_RETRY), + (axislib.RequestError, ConfigEntryState.SETUP_RETRY), + # Device yield unknown error + (axislib.AxisException, ConfigEntryState.SETUP_ERROR), + ], +) +@pytest.mark.usefixtures("mock_default_requests") +async def test_get_axis_api_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, + state: ConfigEntryState, ) -> None: - """Device unauthorized yields authentication required error.""" - with ( - patch( - "axis.interfaces.vapix.Vapix.initialize", side_effect=axislib.Unauthorized - ), - pytest.raises(axis.errors.AuthenticationRequired), + """Failed setup schedules a retry of setup.""" + config_entry.add_to_hass(hass) + with patch( + "homeassistant.components.axis.hub.api.axis.interfaces.vapix.Vapix.initialize", + side_effect=side_effect, ): - await axis.hub.get_axis_api(hass, config_entry_data) - - -async def test_get_device_device_unavailable( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] -) -> None: - """Device unavailable yields cannot connect error.""" - with ( - patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.RequestError), - pytest.raises(axis.errors.CannotConnect), - ): - await axis.hub.get_axis_api(hass, config_entry_data) - - -async def test_get_device_unknown_error( - hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any] -) -> None: - """Device yield unknown error.""" - with ( - patch("axis.interfaces.vapix.Vapix.request", side_effect=axislib.AxisException), - pytest.raises(axis.errors.AuthenticationRequired), - ): - await axis.hub.get_axis_api(hass, config_entry_data) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state == state diff --git a/tests/components/axis/test_init.py b/tests/components/axis/test_init.py index e4dc7cd1eef..89737325440 100644 --- a/tests/components/axis/test_init.py +++ b/tests/components/axis/test_init.py @@ -5,19 +5,23 @@ from unittest.mock import AsyncMock, Mock, patch import pytest from homeassistant.components import axis -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry -async def 
test_setup_entry(setup_config_entry: ConfigEntry) -> None: + +async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: """Test successful setup of entry.""" - assert setup_config_entry.state is ConfigEntryState.LOADED + assert config_entry_setup.state is ConfigEntryState.LOADED async def test_setup_entry_fails( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test successful setup of entry.""" + config_entry.add_to_hass(hass) + mock_device = Mock() mock_device.async_setup = AsyncMock(return_value=False) @@ -30,18 +34,21 @@ async def test_setup_entry_fails( async def test_unload_entry( - hass: HomeAssistant, setup_config_entry: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test successful unload of entry.""" - assert setup_config_entry.state is ConfigEntryState.LOADED + assert config_entry_setup.state is ConfigEntryState.LOADED - assert await hass.config_entries.async_unload(setup_config_entry.entry_id) - assert setup_config_entry.state is ConfigEntryState.NOT_LOADED + assert await hass.config_entries.async_unload(config_entry_setup.entry_id) + assert config_entry_setup.state is ConfigEntryState.NOT_LOADED @pytest.mark.parametrize("config_entry_version", [1]) -async def test_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def test_migrate_entry( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test successful migration of entry data.""" + config_entry.add_to_hass(hass) assert config_entry.version == 1 mock_device = Mock() diff --git a/tests/components/axis/test_light.py b/tests/components/axis/test_light.py index a5ae66afee0..c33af5ec3a4 100644 --- a/tests/components/axis/test_light.py +++ b/tests/components/axis/test_light.py @@ -1,12 +1,12 @@ """Axis light platform tests.""" -from collections.abc import Callable from typing import Any from unittest.mock import patch from axis.models.api import CONTEXT import pytest import respx +from syrupy import SnapshotAssertion from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN from homeassistant.const import ( @@ -14,12 +14,16 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, - STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import DEFAULT_HOST, NAME +from tests.common import snapshot_platform + API_DISCOVERY_LIGHT_CONTROL = { "id": "light-control", "version": "1.1", @@ -69,10 +73,10 @@ def light_control_fixture(light_control_items: list[dict[str, Any]]) -> None: @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) @pytest.mark.parametrize("light_control_items", [[]]) -@pytest.mark.usefixtures("setup_config_entry") +@pytest.mark.usefixtures("config_entry_setup") async def test_no_light_entity_without_light_control_representation( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + mock_rtsp_event: RtspEventMock, ) -> None: """Verify no lights entities get created without light control representation.""" mock_rtsp_event( @@ -88,10 +92,12 @@ async def test_no_light_entity_without_light_control_representation( @pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_LIGHT_CONTROL]) -@pytest.mark.usefixtures("setup_config_entry") async def test_lights( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, 
str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, ) -> None: """Test that lights are loaded properly.""" # Add light @@ -128,6 +134,9 @@ async def test_lights( }, ) + with patch("homeassistant.components.axis.PLATFORMS", [Platform.LIGHT]): + config_entry = await config_entry_factory() + mock_rtsp_event( topic="tns1:Device/tnsaxis:Light/Status", data_type="state", @@ -136,15 +145,10 @@ async def test_lights( source_idx="0", ) await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(LIGHT_DOMAIN)) == 1 + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) entity_id = f"{LIGHT_DOMAIN}.{NAME}_ir_light_0" - light_0 = hass.states.get(entity_id) - assert light_0.state == STATE_ON - assert light_0.name == f"{NAME} IR Light 0" - # Turn on, set brightness, light already on with ( patch("axis.interfaces.vapix.LightHandler.activate_light") as mock_activate, diff --git a/tests/components/axis/test_switch.py b/tests/components/axis/test_switch.py index 479830783b1..964cfdae64c 100644 --- a/tests/components/axis/test_switch.py +++ b/tests/components/axis/test_switch.py @@ -1,23 +1,27 @@ """Axis switch platform tests.""" -from collections.abc import Callable from unittest.mock import patch from axis.models.api import CONTEXT import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, STATE_ON, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import API_DISCOVERY_PORT_MANAGEMENT, NAME +from tests.common import snapshot_platform + PORT_DATA = """root.IOPort.I0.Configurable=yes root.IOPort.I0.Direction=output root.IOPort.I0.Output.Name=Doorbell @@ -28,61 +32,6 @@ root.IOPort.I1.Output.Name= root.IOPort.I1.Output.Active=open """ - -@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) -@pytest.mark.usefixtures("setup_config_entry") -async def test_switches_with_port_cgi( - hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], -) -> None: - """Test that switches are loaded properly using port.cgi.""" - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="inactive", - source_name="RelayToken", - source_idx="0", - ) - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="active", - source_name="RelayToken", - source_idx="1", - ) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" - - entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" - - with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_on.assert_called_once_with("0") - - with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - 
{ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_off.assert_called_once_with("0") - - PORT_MANAGEMENT_RESPONSE = { "apiVersion": "1.0", "method": "getPorts", @@ -113,14 +62,18 @@ PORT_MANAGEMENT_RESPONSE = { } -@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) -@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) -@pytest.mark.usefixtures("setup_config_entry") -async def test_switches_with_port_management( +@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) +async def test_switches_with_port_cgi( hass: HomeAssistant, - mock_rtsp_event: Callable[[str, str, str, str, str, str], None], + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, ) -> None: - """Test that switches are loaded properly using port management.""" + """Test that switches are loaded properly using port.cgi.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", @@ -137,30 +90,61 @@ async def test_switches_with_port_management( ) await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" + with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_on.assert_called_once_with("0") - # State update + with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_off.assert_called_once_with("0") + +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) +@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) +async def test_switches_with_port_management( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that switches are loaded properly using port management.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="inactive", + source_name="RelayToken", + source_idx="0", + ) mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", data_value="active", source_name="RelayToken", - source_idx="0", + source_idx="1", ) await hass.async_block_till_done() - assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" with patch("axis.interfaces.vapix.IoPortManagement.close") as mock_turn_on: await hass.services.async_call( @@ -179,3 +163,16 @@ async def test_switches_with_port_management( 
blocking=True, ) mock_turn_off.assert_called_once_with("0") + + # State update + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="active", + source_name="RelayToken", + source_idx="0", + ) + await hass.async_block_till_done() + + assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON diff --git a/tests/components/azure_data_explorer/conftest.py b/tests/components/azure_data_explorer/conftest.py index 4168021b333..f8915a12ce1 100644 --- a/tests/components/azure_data_explorer/conftest.py +++ b/tests/components/azure_data_explorer/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for Azure Data Explorer.""" +from collections.abc import Generator from datetime import timedelta import logging from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.azure_data_explorer.const import ( CONF_FILTER, diff --git a/tests/components/azure_event_hub/conftest.py b/tests/components/azure_event_hub/conftest.py index a34f2e646f2..b814a845c86 100644 --- a/tests/components/azure_event_hub/conftest.py +++ b/tests/components/azure_event_hub/conftest.py @@ -1,5 +1,6 @@ """Test fixtures for AEH.""" +from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from datetime import timedelta import logging @@ -8,7 +9,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from azure.eventhub.aio import EventHubProducerClient import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.azure_event_hub.const import ( CONF_FILTER, diff --git a/tests/components/balboa/conftest.py b/tests/components/balboa/conftest.py index fbdc2f8a759..0bb8b2cd468 100644 --- a/tests/components/balboa/conftest.py +++ b/tests/components/balboa/conftest.py @@ -2,12 +2,11 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest.mock import AsyncMock, MagicMock, patch from pybalboa.enums import HeatMode, LowHighRange import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/balboa/snapshots/test_fan.ambr b/tests/components/balboa/snapshots/test_fan.ambr index 2b87a961906..8d35ab6de7c 100644 --- a/tests/components/balboa/snapshots/test_fan.ambr +++ b/tests/components/balboa/snapshots/test_fan.ambr @@ -28,7 +28,7 @@ 'original_name': 'Pump 1', 'platform': 'balboa', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': 'pump', 'unique_id': 'FakeSpa-Pump 1-c0ffee', 'unit_of_measurement': None, @@ -42,7 +42,7 @@ 'percentage_step': 50.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.fakespa_pump_1', diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index c877f2858cd..850184a7d71 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -85,6 +85,8 @@ async def test_spa_temperature( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test spa temperature settings.""" + client.temperature_minimum = 110 + client.temperature_maximum = 250 # flip the spa into F # set temp to a valid number state = await _patch_spa_settemp(hass, client, 0, 100) diff --git a/tests/components/bang_olufsen/conftest.py 
b/tests/components/bang_olufsen/conftest.py index 1fbcbe0fe69..4764798f34d 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -3,10 +3,26 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch -from mozart_api.models import BeolinkPeer +from mozart_api.models import ( + Action, + BeolinkPeer, + ContentItem, + PlaybackContentMetadata, + PlaybackProgress, + PlaybackState, + ProductState, + RemoteMenuItem, + RenderingState, + SoftwareUpdateStatus, + Source, + SourceArray, + SourceTypeEnum, + VolumeState, +) import pytest from homeassistant.components.bang_olufsen.const import DOMAIN +from homeassistant.core import HomeAssistant from .const import ( TEST_DATA_CREATE_ENTRY, @@ -30,10 +46,17 @@ def mock_config_entry(): ) +@pytest.fixture +async def mock_media_player(hass: HomeAssistant, mock_config_entry, mock_mozart_client): + """Mock media_player entity.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + @pytest.fixture def mock_mozart_client() -> Generator[AsyncMock]: """Mock MozartClient.""" - with ( patch( "homeassistant.components.bang_olufsen.MozartClient", autospec=True @@ -50,6 +73,170 @@ def mock_mozart_client() -> Generator[AsyncMock]: client.get_beolink_self.return_value = BeolinkPeer( friendly_name=TEST_FRIENDLY_NAME, jid=TEST_JID_1 ) + client.get_softwareupdate_status = AsyncMock() + client.get_softwareupdate_status.return_value = SoftwareUpdateStatus( + software_version="1.0.0", state="" + ) + client.get_product_state = AsyncMock() + client.get_product_state.return_value = ProductState( + volume=VolumeState(), + playback=PlaybackState( + metadata=PlaybackContentMetadata(), + progress=PlaybackProgress(), + source=Source(), + state=RenderingState(value="started"), + ), + ) + client.get_available_sources = AsyncMock() + client.get_available_sources.return_value = SourceArray( + items=[ + # Is in the HIDDEN_SOURCE_IDS constant, so should not be user selectable + Source( + name="AirPlay", + id="airPlay", + is_enabled=True, + is_multiroom_available=False, + ), + # The only available source + Source( + name="Tidal", + id="tidal", + is_enabled=True, + is_multiroom_available=True, + ), + # Is disabled, so should not be user selectable + Source( + name="Powerlink", + id="pl", + is_enabled=False, + ), + ] + ) + client.get_remote_menu = AsyncMock() + client.get_remote_menu.return_value = { + # Music category, so shouldn't be included in video sources + "b355888b-2cde-5f94-8592-d47b71d52a27": RemoteMenuItem( + action_list=[ + Action( + button_name=None, + content_id="netRadio://6629967157728971", + deezer_user_id=None, + gain_db=None, + listening_mode_id=None, + preset_key=None, + queue_item=None, + queue_settings=None, + radio_station_id=None, + source=None, + speaker_group_id=None, + stand_position=None, + stop_duration=None, + tone_name=None, + type="triggerContent", + volume_level=None, + ) + ], + scene_list=None, + disabled=None, + dynamic_list=None, + first_child_menu_item_id=None, + label="Yle Radio Suomi Helsinki", + next_sibling_menu_item_id="0b4552f8-7ac6-5046-9d44-5410a815b8d6", + parent_menu_item_id="eee0c2d0-2b3a-4899-a708-658475c38926", + available=None, + content=ContentItem( + categories=["music"], + content_uri="netRadio://6629967157728971", + label="Yle Radio Suomi Helsinki", + source=SourceTypeEnum(value="netRadio"), + ), + fixed=True, + id="b355888b-2cde-5f94-8592-d47b71d52a27", + ), + # Has "hdmi" as category, so should 
be included in video sources + "b6591565-80f4-4356-bcd9-c92ca247f0a9": RemoteMenuItem( + action_list=[ + Action( + button_name=None, + content_id="tv://hdmi_1", + deezer_user_id=None, + gain_db=None, + listening_mode_id=None, + preset_key=None, + queue_item=None, + queue_settings=None, + radio_station_id=None, + source=None, + speaker_group_id=None, + stand_position=None, + stop_duration=None, + tone_name=None, + type="triggerContent", + volume_level=None, + ) + ], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id=None, + label="HDMI A", + next_sibling_menu_item_id="0ba98974-7b1f-40dc-bc48-fbacbb0f1793", + parent_menu_item_id="b66c835b-6b98-4400-8f84-6348043792c7", + available=True, + content=ContentItem( + categories=["hdmi"], + content_uri="tv://hdmi_1", + label="HDMI A", + source=SourceTypeEnum(value="tv"), + ), + fixed=False, + id="b6591565-80f4-4356-bcd9-c92ca247f0a9", + ), + # The parent remote menu item. Has the TV label and should therefore not be included in video sources + "b66c835b-6b98-4400-8f84-6348043792c7": RemoteMenuItem( + action_list=[], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id="b6591565-80f4-4356-bcd9-c92ca247f0a9", + label="TV", + next_sibling_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", + parent_menu_item_id=None, + available=True, + content=None, + fixed=True, + id="b66c835b-6b98-4400-8f84-6348043792c7", + ), + # Has an empty content, so should not be included + "64c9da45-3682-44a4-8030-09ed3ef44160": RemoteMenuItem( + action_list=[], + scene_list=None, + disabled=False, + dynamic_list="none", + first_child_menu_item_id=None, + label="ListeningPosition", + next_sibling_menu_item_id=None, + parent_menu_item_id="0c4547fe-d3cc-4348-a425-473595b8c9fb", + available=True, + content=None, + fixed=True, + id="64c9da45-3682-44a4-8030-09ed3ef44160", + ), + } + client.post_standby = AsyncMock() + client.set_current_volume_level = AsyncMock() + client.set_volume_mute = AsyncMock() + client.post_playback_command = AsyncMock() + client.seek_to_position = AsyncMock() + client.post_clear_queue = AsyncMock() + client.post_overlay_play = AsyncMock() + client.post_uri_source = AsyncMock() + client.run_provided_scene = AsyncMock() + client.activate_preset = AsyncMock() + client.start_deezer_flow = AsyncMock() + client.add_to_queue = AsyncMock() + client.post_remote_trigger = AsyncMock() + client.set_active_source = AsyncMock() # Non-REST API client methods client.check_device_connection = AsyncMock() diff --git a/tests/components/bang_olufsen/const.py b/tests/components/bang_olufsen/const.py index 187f93108a1..d5e2221675a 100644 --- a/tests/components/bang_olufsen/const.py +++ b/tests/components/bang_olufsen/const.py @@ -1,6 +1,25 @@ """Constants used for testing the bang_olufsen integration.""" from ipaddress import IPv4Address, IPv6Address +from unittest.mock import Mock + +from mozart_api.exceptions import ApiException +from mozart_api.models import ( + Action, + OverlayPlayRequest, + OverlayPlayRequestTextToSpeechTextToSpeech, + PlaybackContentMetadata, + PlaybackError, + PlaybackProgress, + PlayQueueItem, + PlayQueueItemType, + RenderingState, + SceneProperties, + UserFlow, + VolumeLevel, + VolumeMute, + VolumeState, +) from homeassistant.components.bang_olufsen.const import ( ATTR_FRIENDLY_NAME, @@ -8,6 +27,7 @@ from homeassistant.components.bang_olufsen.const import ( ATTR_SERIAL_NUMBER, ATTR_TYPE_NUMBER, CONF_BEOLINK_JID, + BangOlufsenSource, ) from homeassistant.components.zeroconf import 
ZeroconfServiceInfo from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME @@ -24,7 +44,7 @@ TEST_FRIENDLY_NAME = "Living room Balance" TEST_TYPE_NUMBER = "1111" TEST_ITEM_NUMBER = "1111111" TEST_JID_1 = f"{TEST_TYPE_NUMBER}.{TEST_ITEM_NUMBER}.{TEST_SERIAL_NUMBER}@products.bang-olufsen.com" - +TEST_MEDIA_PLAYER_ENTITY_ID = "media_player.beosound_balance_11111111" TEST_HOSTNAME_ZEROCONF = TEST_NAME.replace(" ", "-") + ".local." TEST_TYPE_ZEROCONF = "_bangolufsen._tcp.local." @@ -80,3 +100,80 @@ TEST_DATA_ZEROCONF_IPV6 = ZeroconfServiceInfo( ATTR_ITEM_NUMBER: TEST_ITEM_NUMBER, }, ) + +TEST_AUDIO_SOURCES = [BangOlufsenSource.TIDAL.name] +TEST_VIDEO_SOURCES = ["HDMI A"] +TEST_SOURCES = TEST_AUDIO_SOURCES + TEST_VIDEO_SOURCES +TEST_FALLBACK_SOURCES = [ + "Audio Streamer", + "Spotify Connect", + "Line-In", + "Optical", + "B&O Radio", + "Deezer", + "Tidal Connect", +] +TEST_PLAYBACK_METADATA = PlaybackContentMetadata( + album_name="Test album", + artist_name="Test artist", + organization="Test organization", + title="Test title", + total_duration_seconds=123, + track=1, +) +TEST_PLAYBACK_ERROR = PlaybackError(error="Test error") +TEST_PLAYBACK_PROGRESS = PlaybackProgress(progress=123) +TEST_PLAYBACK_STATE_PAUSED = RenderingState(value="paused") +TEST_PLAYBACK_STATE_PLAYING = RenderingState(value="started") +TEST_VOLUME = VolumeState(level=VolumeLevel(level=40)) +TEST_VOLUME_HOME_ASSISTANT_FORMAT = 0.4 +TEST_PLAYBACK_STATE_TURN_OFF = RenderingState(value="stopped") +TEST_VOLUME_MUTED = VolumeState( + muted=VolumeMute(muted=True), level=VolumeLevel(level=40) +) +TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT = True +TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT = 10.0 +TEST_SEEK_POSITION = 10000 +TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS = OverlayPlayRequest( + text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( + lang="da-dk", text="Dette er en test" + ) +) +TEST_OVERLAY_OFFSET_VOLUME_TTS = OverlayPlayRequest( + text_to_speech=OverlayPlayRequestTextToSpeechTextToSpeech( + lang="en-us", text="This is a test" + ), + volume_absolute=60, +) +TEST_RADIO_STATION = SceneProperties( + action_list=[ + Action( + type="radio", + radio_station_id="1234567890123456", + ) + ] +) +TEST_DEEZER_FLOW = UserFlow(user_id="123") +TEST_DEEZER_PLAYLIST = PlayQueueItem( + provider=PlayQueueItemType(value="deezer"), + start_now_from_position=123, + type="playlist", + uri="playlist:1234567890", +) +TEST_DEEZER_TRACK = PlayQueueItem( + provider=PlayQueueItemType(value="deezer"), + start_now_from_position=0, + type="track", + uri="1234567890", +) + +# codespell can't see the escaped ', so it thinks the word is misspelled +TEST_DEEZER_INVALID_FLOW = ApiException( + status=400, + reason="Bad Request", + http_resp=Mock( + status=400, + reason="Bad Request", + data='{"message": "Couldn\'t start user flow for me"}', # codespell:ignore + ), +) diff --git a/tests/components/bang_olufsen/test_config_flow.py b/tests/components/bang_olufsen/test_config_flow.py index ad513905f16..e637120a6ae 100644 --- a/tests/components/bang_olufsen/test_config_flow.py +++ b/tests/components/bang_olufsen/test_config_flow.py @@ -132,7 +132,7 @@ async def test_config_flow_zeroconf(hass: HomeAssistant, mock_mozart_client) -> assert result_confirm["type"] is FlowResultType.CREATE_ENTRY assert result_confirm["data"] == TEST_DATA_CREATE_ENTRY - assert mock_mozart_client.get_beolink_self.call_count == 0 + assert mock_mozart_client.get_beolink_self.call_count == 1 async def test_config_flow_zeroconf_not_mozart_device(hass: HomeAssistant) -> None: @@ -159,3 
+159,21 @@ async def test_config_flow_zeroconf_ipv6(hass: HomeAssistant) -> None: assert result_user["type"] is FlowResultType.ABORT assert result_user["reason"] == "ipv6_address" + + +async def test_config_flow_zeroconf_invalid_ip( + hass: HomeAssistant, mock_mozart_client +) -> None: + """Test zeroconf discovery with invalid IP address.""" + mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( + Mock(), Mock() + ) + + result_user = await hass.config_entries.flow.async_init( + handler=DOMAIN, + context={CONF_SOURCE: SOURCE_ZEROCONF}, + data=TEST_DATA_ZEROCONF, + ) + + assert result_user["type"] is FlowResultType.ABORT + assert result_user["reason"] == "invalid_address" diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py new file mode 100644 index 00000000000..74867a8eedf --- /dev/null +++ b/tests/components/bang_olufsen/test_media_player.py @@ -0,0 +1,1067 @@ +"""Test the Bang & Olufsen media_player entity.""" + +from contextlib import nullcontext as does_not_raise +from unittest.mock import ANY, patch + +from mozart_api.models import PlaybackContentMetadata +import pytest + +from homeassistant.components.bang_olufsen.const import ( + BANG_OLUFSEN_STATES, + DOMAIN, + BangOlufsenSource, + WebsocketNotification, +) +from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_INPUT_SOURCE_LIST, + ATTR_MEDIA_ALBUM_ARTIST, + ATTR_MEDIA_ALBUM_NAME, + ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_CHANNEL, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_DURATION, + ATTR_MEDIA_EXTRA, + ATTR_MEDIA_POSITION, + ATTR_MEDIA_POSITION_UPDATED_AT, + ATTR_MEDIA_SEEK_POSITION, + ATTR_MEDIA_TITLE, + ATTR_MEDIA_TRACK, + ATTR_MEDIA_VOLUME_LEVEL, + ATTR_MEDIA_VOLUME_MUTED, + MediaPlayerState, + MediaType, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.setup import async_setup_component + +from .const import ( + TEST_AUDIO_SOURCES, + TEST_DEEZER_FLOW, + TEST_DEEZER_INVALID_FLOW, + TEST_DEEZER_PLAYLIST, + TEST_DEEZER_TRACK, + TEST_FALLBACK_SOURCES, + TEST_MEDIA_PLAYER_ENTITY_ID, + TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS, + TEST_OVERLAY_OFFSET_VOLUME_TTS, + TEST_PLAYBACK_ERROR, + TEST_PLAYBACK_METADATA, + TEST_PLAYBACK_PROGRESS, + TEST_PLAYBACK_STATE_PAUSED, + TEST_PLAYBACK_STATE_PLAYING, + TEST_PLAYBACK_STATE_TURN_OFF, + TEST_RADIO_STATION, + TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, + TEST_SERIAL_NUMBER, + TEST_SOURCES, + TEST_VIDEO_SOURCES, + TEST_VOLUME, + TEST_VOLUME_HOME_ASSISTANT_FORMAT, + TEST_VOLUME_MUTED, + TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT, +) + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +async def test_initialization( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_mozart_client +) -> None: + """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" + + # Setup entity + with patch( + "homeassistant.components.bang_olufsen.media_player._LOGGER.debug" + ) as mock_logger: + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Ensure that the logger has been called with the debug message + mock_logger.assert_called_once_with( + "Connected to: %s %s running SW %s", "Beosound Balance", "11111111", "1.0.0" + ) + + # Check 
state (The initial state in this test does not contain all that much. + # States are tested using simulated WebSocket events.) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_SOURCES + assert states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + + # Check API calls + mock_mozart_client.get_softwareupdate_status.assert_called_once() + mock_mozart_client.get_product_state.assert_called_once() + mock_mozart_client.get_available_sources.assert_called_once() + mock_mozart_client.get_remote_menu.assert_called_once() + + +async def test_async_update_sources_audio_only( + hass: HomeAssistant, mock_config_entry, mock_mozart_client +) -> None: + """Test sources are correctly handled in _async_update_sources.""" + mock_mozart_client.get_remote_menu.return_value = {} + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_AUDIO_SOURCES + + +async def test_async_update_sources_outdated_api( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test fallback sources are correctly handled in _async_update_sources.""" + mock_mozart_client.get_available_sources.side_effect = ValueError() + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_INPUT_SOURCE_LIST] + == TEST_FALLBACK_SOURCES + TEST_VIDEO_SOURCES + ) + + +async def test_async_update_playback_metadata( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_metadata.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_DURATION not in states.attributes + assert ATTR_MEDIA_TITLE not in states.attributes + assert ATTR_MEDIA_ALBUM_NAME not in states.attributes + assert ATTR_MEDIA_ALBUM_ARTIST not in states.attributes + assert ATTR_MEDIA_TRACK not in states.attributes + assert ATTR_MEDIA_CHANNEL not in states.attributes + + # Send the WebSocket event dispatch + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", + TEST_PLAYBACK_METADATA, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_MEDIA_DURATION] + == TEST_PLAYBACK_METADATA.total_duration_seconds + ) + assert states.attributes[ATTR_MEDIA_TITLE] == TEST_PLAYBACK_METADATA.title + assert states.attributes[ATTR_MEDIA_ALBUM_NAME] == TEST_PLAYBACK_METADATA.album_name + assert ( + states.attributes[ATTR_MEDIA_ALBUM_ARTIST] == TEST_PLAYBACK_METADATA.artist_name + ) + assert states.attributes[ATTR_MEDIA_TRACK] == TEST_PLAYBACK_METADATA.track + assert states.attributes[ATTR_MEDIA_CHANNEL] == TEST_PLAYBACK_METADATA.organization + + +async def test_async_update_playback_error( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_error.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable + with patch("homeassistant.helpers.dispatcher._LOGGER.error") as mock_logger: + async_dispatcher_send( + hass, + 
f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", + TEST_PLAYBACK_ERROR, + ) + + # The traceback can't be tested, so it is replaced with "ANY" + mock_logger.assert_called_once_with( + "%s\n%s", + "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)", + ANY, + ) + + +async def test_async_update_playback_progress( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_progress.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_POSITION not in states.attributes + old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + assert old_updated_at + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", + TEST_PLAYBACK_PROGRESS, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress + new_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] + assert new_updated_at + assert old_updated_at != new_updated_at + + +async def test_async_update_playback_state( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test _async_update_playback_state.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == MediaPlayerState.PLAYING + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_PAUSED, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == TEST_PLAYBACK_STATE_PAUSED.value + + +@pytest.mark.parametrize( + ("reported_source", "real_source", "content_type", "progress", "metadata"), + [ + # Normal source, music mediatype expected, no progress expected + ( + BangOlufsenSource.TIDAL, + BangOlufsenSource.TIDAL, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(), + ), + # URI source, url media type expected, no progress expected + ( + BangOlufsenSource.URI_STREAMER, + BangOlufsenSource.URI_STREAMER, + MediaType.URL, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(), + ), + # Line-In source,media type expected, progress 0 expected + ( + BangOlufsenSource.LINE_IN, + BangOlufsenSource.CHROMECAST, + MediaType.MUSIC, + 0, + PlaybackContentMetadata(), + ), + # Chromecast as source, but metadata says Line-In. 
+ # Progress is not set to 0 as the source is Chromecast first + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.LINE_IN, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.LINE_IN.name), + ), + # Chromecast as source, but metadata says Bluetooth + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(title=BangOlufsenSource.BLUETOOTH.name), + ), + # Chromecast as source, but metadata says Bluetooth in another way + ( + BangOlufsenSource.CHROMECAST, + BangOlufsenSource.BLUETOOTH, + MediaType.MUSIC, + TEST_PLAYBACK_PROGRESS.progress, + PlaybackContentMetadata(art=[]), + ), + ], +) +async def test_async_update_source_change( + reported_source, + real_source, + content_type, + progress, + metadata, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test _async_update_source_change.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_INPUT_SOURCE not in states.attributes + assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC + + # Simulate progress attribute being available + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", + TEST_PLAYBACK_PROGRESS, + ) + + # Simulate metadata + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", + metadata, + ) + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", + reported_source, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name + assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type + assert states.attributes[ATTR_MEDIA_POSITION] == progress + + +async def test_async_turn_off( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_turn_off.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "turn_off", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_TURN_OFF, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] + + # Check API call + mock_mozart_client.post_standby.assert_called_once() + + +async def test_async_set_volume_level( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_set_volume_level and _async_update_volume by proxy.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes + + await hass.services.async_call( + "media_player", + "volume_set", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_VOLUME_LEVEL: TEST_VOLUME_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + # The service call will trigger a WebSocket notification + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + 
assert ( + states.attributes[ATTR_MEDIA_VOLUME_LEVEL] == TEST_VOLUME_HOME_ASSISTANT_FORMAT + ) + + mock_mozart_client.set_current_volume_level.assert_called_once_with( + volume_level=TEST_VOLUME.level + ) + + +async def test_async_mute_volume( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_mute_volume.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes + + await hass.services.async_call( + "media_player", + "volume_mute", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_VOLUME_MUTED: TEST_VOLUME_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + # The service call will trigger a WebSocket notification + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME_MUTED, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert ( + states.attributes[ATTR_MEDIA_VOLUME_MUTED] + == TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT + ) + + mock_mozart_client.set_volume_mute.assert_called_once_with( + volume_mute=TEST_VOLUME_MUTED.muted + ) + + +@pytest.mark.parametrize( + ("initial_state", "command"), + [ + # Current state is playing, "pause" command expected + (TEST_PLAYBACK_STATE_PLAYING, "pause"), + # Current state is paused, "play" command expected + (TEST_PLAYBACK_STATE_PAUSED, "play"), + ], +) +async def test_async_media_play_pause( + initial_state, + command, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_media_play_pause.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the initial state + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + initial_state, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[initial_state.value] + + await hass.services.async_call( + "media_player", + "media_play_pause", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_playback_command.assert_called_once_with(command=command) + + +async def test_async_media_stop( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_stop.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the state to playing + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", + TEST_PLAYBACK_STATE_PLAYING, + ) + + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] + + await hass.services.async_call( + "media_player", + "media_stop", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + # Check API call + mock_mozart_client.post_playback_command.assert_called_once_with(command="stop") + + +async def test_async_media_next_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_next_track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "media_next_track", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + 
mock_mozart_client.post_playback_command.assert_called_once_with(command="skip") + + +@pytest.mark.parametrize( + ("source", "expected_result", "seek_called_times"), + [ + # Deezer source, seek expected + (BangOlufsenSource.DEEZER, does_not_raise(), 1), + # Non deezer source, seek shouldn't work + (BangOlufsenSource.TIDAL, pytest.raises(HomeAssistantError), 0), + ], +) +async def test_async_media_seek( + source, + expected_result, + seek_called_times, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_media_seek.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the source + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", + source, + ) + + # Check results + with expected_result: + await hass.services.async_call( + "media_player", + "media_seek", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_SEEK_POSITION: TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, + }, + blocking=True, + ) + + assert mock_mozart_client.seek_to_position.call_count == seek_called_times + + +async def test_async_media_previous_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_media_previous_track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "media_previous_track", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_playback_command.assert_called_once_with(command="prev") + + +async def test_async_clear_playlist( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_clear_playlist.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "clear_playlist", + {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, + blocking=True, + ) + + mock_mozart_client.post_clear_queue.assert_called_once() + + +@pytest.mark.parametrize( + ("source", "expected_result", "audio_source_call", "video_source_call"), + [ + # Invalid source + ("Test source", pytest.raises(ServiceValidationError), 0, 0), + # Valid audio source + (BangOlufsenSource.TIDAL.name, does_not_raise(), 1, 0), + # Valid video source + (TEST_VIDEO_SOURCES[0], does_not_raise(), 0, 1), + ], +) +async def test_async_select_source( + source, + expected_result, + audio_source_call, + video_source_call, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, +) -> None: + """Test async_select_source with an invalid source.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with expected_result: + await hass.services.async_call( + "media_player", + "select_source", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_INPUT_SOURCE: source, + }, + blocking=True, + ) + + assert mock_mozart_client.set_active_source.call_count == audio_source_call + assert mock_mozart_client.post_remote_trigger.call_count == video_source_call + + +async def test_async_play_media_invalid_type( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media only accepts valid media types.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with pytest.raises(ServiceValidationError) as exc_info: + await 
hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "test", + ATTR_MEDIA_CONTENT_TYPE: "invalid type", + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "invalid_media_type" + assert exc_info.errisinstance(HomeAssistantError) + + +async def test_async_play_media_url( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media URL.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set up media source + await async_setup_component(hass, "media_source", {"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", + }, + blocking=True, + ) + + mock_mozart_client.post_uri_source.assert_called_once() + + +async def test_async_play_media_overlay_absolute_volume_uri( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media overlay with Home Assistant local URI and absolute volume.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"overlay_absolute_volume": 60}, + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once() + + # Check that the API call was as expected + args, _ = mock_mozart_client.post_overlay_play.call_args + assert args[0].volume_absolute == 60 + assert "/local/doorbell.mp3" in args[0].uri.location + + +async def test_async_play_media_overlay_invalid_offset_volume_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with patch( + "homeassistant.components.bang_olufsen.media_player._LOGGER.warning" + ) as mock_logger: + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "Dette er en test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: { + "overlay_offset_volume": 20, + "overlay_tts_language": "da-dk", + }, + }, + blocking=True, + ) + mock_logger.assert_called_once_with("Error setting volume") + + mock_mozart_client.post_overlay_play.assert_called_once_with( + TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS + ) + + +async def test_async_play_media_overlay_offset_volume_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant offset volume and B&O tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Set the volume to enable offset + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", + TEST_VOLUME, + ) + + await
hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "This is a test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"overlay_offset_volume": 20}, + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once_with( + TEST_OVERLAY_OFFSET_VOLUME_TTS + ) + + +async def test_async_play_media_tts( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Home Assistant tts.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "provider", + }, + blocking=True, + ) + + mock_mozart_client.post_overlay_play.assert_called_once() + + +async def test_async_play_media_radio( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with B&O radio.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1234567890123456", + ATTR_MEDIA_CONTENT_TYPE: "radio", + }, + blocking=True, + ) + + mock_mozart_client.run_provided_scene.assert_called_once_with( + scene_properties=TEST_RADIO_STATION + ) + + +async def test_async_play_media_favourite( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with B&O favourite.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1", + ATTR_MEDIA_CONTENT_TYPE: "favourite", + }, + blocking=True, + ) + + mock_mozart_client.activate_preset.assert_called_once_with(id=int("1")) + + +async def test_async_play_media_deezer_flow( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer flow.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Send a service call + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "flow", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + ATTR_MEDIA_EXTRA: {"id": "123"}, + }, + blocking=True, + ) + + mock_mozart_client.start_deezer_flow.assert_called_once_with( + user_flow=TEST_DEEZER_FLOW + ) + + +async def test_async_play_media_deezer_playlist( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer playlist.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "playlist:1234567890", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + ATTR_MEDIA_EXTRA: {"start_from": 123}, + }, + blocking=True, + ) + + mock_mozart_client.add_to_queue.assert_called_once_with( + 
play_queue_item=TEST_DEEZER_PLAYLIST + ) + + +async def test_async_play_media_deezer_track( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with Deezer track.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "1234567890", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + }, + blocking=True, + ) + + mock_mozart_client.add_to_queue.assert_called_once_with( + play_queue_item=TEST_DEEZER_TRACK + ) + + +async def test_async_play_media_invalid_deezer( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media with an invalid/no Deezer login.""" + + mock_mozart_client.start_deezer_flow.side_effect = TEST_DEEZER_INVALID_FLOW + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + with pytest.raises(HomeAssistantError) as exc_info: + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "flow", + ATTR_MEDIA_CONTENT_TYPE: "deezer", + }, + blocking=True, + ) + + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "play_media_error" + assert exc_info.errisinstance(HomeAssistantError) + + mock_mozart_client.start_deezer_flow.assert_called_once() + + +async def test_async_play_media_url_m3u( + hass: HomeAssistant, mock_mozart_client, mock_config_entry +) -> None: + """Test async_play_media URL with the m3u extension.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + with ( + pytest.raises(HomeAssistantError) as exc_info, + patch( + "homeassistant.components.bang_olufsen.media_player.async_process_play_media_url", + return_value="https://test.com/test.m3u", + ), + ): + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", + ATTR_MEDIA_CONTENT_TYPE: "audio/mpeg", + }, + blocking=True, + ) + + # Check exception + assert exc_info.value.translation_domain == DOMAIN + assert exc_info.value.translation_key == "m3u_invalid_format" + assert exc_info.errisinstance(HomeAssistantError) + + mock_mozart_client.post_uri_source.assert_not_called() + + +@pytest.mark.parametrize( + ("child", "present"), + [ + # Audio source expected + ( + { + "title": "test.mp3", + "media_class": "music", + "media_content_type": "audio/mpeg", + "media_content_id": "media-source://media_source/local/test.mp3", + "can_play": True, + "can_expand": False, + "thumbnail": None, + "children_media_class": None, + }, + True, + ), + # Video source not expected + ( + { + "title": "test.mp4", + "media_class": "video", + "media_content_type": "video/mp4", + "media_content_id": ("media-source://media_source/local/test.mp4"), + "can_play": True, + "can_expand": False, + "thumbnail": None, + "children_media_class": None, + }, + False, + ), + ], +) +async def test_async_browse_media( + child, + present, + hass: HomeAssistant, + mock_mozart_client, + mock_config_entry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test async_browse_media with audio and video source.""" + + 
mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await async_setup_component(hass, "media_source", {"media_source": {}}) + + client = await hass_ws_client() + await client.send_json_auto_id( + { + "type": "media_player/browse_media", + "entity_id": TEST_MEDIA_PLAYER_ENTITY_ID, + } + ) + response = await client.receive_json() + assert response["success"] + + assert (child in response["result"]["children"]) is present diff --git a/tests/components/bayesian/test_binary_sensor.py b/tests/components/bayesian/test_binary_sensor.py index e4f646572cb..818e9bed909 100644 --- a/tests/components/bayesian/test_binary_sensor.py +++ b/tests/components/bayesian/test_binary_sensor.py @@ -718,17 +718,18 @@ async def test_observed_entities(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored"] == state.attributes.get( - "occurred_observation_entities" - ) + assert state.attributes.get("occurred_observation_entities") == [ + "sensor.test_monitored" + ] hass.states.async_set("sensor.test_monitored1", "on") await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( - state.attributes.get("occurred_observation_entities") - ) + assert sorted(state.attributes.get("occurred_observation_entities")) == [ + "sensor.test_monitored", + "sensor.test_monitored1", + ] async def test_state_attributes_are_serializable(hass: HomeAssistant) -> None: @@ -785,9 +786,10 @@ async def test_state_attributes_are_serializable(hass: HomeAssistant) -> None: await hass.async_block_till_done() state = hass.states.get("binary_sensor.test_binary") - assert ["sensor.test_monitored", "sensor.test_monitored1"] == sorted( - state.attributes.get("occurred_observation_entities") - ) + assert sorted(state.attributes.get("occurred_observation_entities")) == [ + "sensor.test_monitored", + "sensor.test_monitored1", + ] for attrs in state.attributes.values(): json.dumps(attrs) diff --git a/tests/components/binary_sensor/test_device_condition.py b/tests/components/binary_sensor/test_device_condition.py index c2bd29fad36..8a0132ff2af 100644 --- a/tests/components/binary_sensor/test_device_condition.py +++ b/tests/components/binary_sensor/test_device_condition.py @@ -22,7 +22,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -239,7 +232,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -308,26 +301,26 @@ async def test_if_state( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await 
hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for turn_on and turn_off conditions.""" @@ -375,19 +368,19 @@ async def test_if_state_legacy( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for firing if condition is on with delay.""" @@ -439,26 +432,26 @@ async def test_if_fires_on_for_condition( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future time_freeze.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future time_freeze.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/binary_sensor/test_device_trigger.py b/tests/components/binary_sensor/test_device_trigger.py index f91a336061d..78e382f77bf 100644 --- a/tests/components/binary_sensor/test_device_trigger.py +++ b/tests/components/binary_sensor/test_device_trigger.py @@ -22,7 +22,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, 
device_registry: dr.DeviceRegistry, @@ -240,7 +233,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for on and off triggers firing.""" @@ -313,21 +306,22 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"not_bat_low device - {entry.entity_id} - on - off - None" ) hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] == f"bat_low device - {entry.entity_id} - off - on - None" + service_calls[1].data["some"] + == f"bat_low device - {entry.entity_id} - off - on - None" ) @@ -335,7 +329,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing with delay.""" @@ -388,17 +382,17 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) @@ -407,7 +401,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_binary_sensor_entities: dict[str, MockBinarySensor], ) -> None: """Test for triggers firing.""" @@ -459,12 +453,12 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) diff --git a/tests/components/binary_sensor/test_init.py b/tests/components/binary_sensor/test_init.py index 8f14063e011..ea0ad05a0db 100644 --- a/tests/components/binary_sensor/test_init.py +++ b/tests/components/binary_sensor/test_init.py @@ -1,9 +1,9 @@ """The tests for the Binary sensor component.""" +from collections.abc import Generator from unittest import mock import pytest -from typing_extensions import Generator from homeassistant.components import binary_sensor from homeassistant.config_entries 
import ConfigEntry, ConfigFlow diff --git a/tests/components/blink/test_diagnostics.py b/tests/components/blink/test_diagnostics.py index 3b120d23038..d527633d4c9 100644 --- a/tests/components/blink/test_diagnostics.py +++ b/tests/components/blink/test_diagnostics.py @@ -31,4 +31,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/blueprint/common.py b/tests/components/blueprint/common.py index dd59b6df082..037aa38f6cb 100644 --- a/tests/components/blueprint/common.py +++ b/tests/components/blueprint/common.py @@ -1,9 +1,8 @@ """Blueprints test helpers.""" +from collections.abc import Generator from unittest.mock import patch -from typing_extensions import Generator - def stub_blueprint_populate_fixture_helper() -> Generator[None]: """Stub copying the blueprints to the config folder.""" diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index f135bbf23b8..94036d208ab 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -192,9 +192,28 @@ async def test_fetch_blueprint_from_website_url( assert imported_blueprint.blueprint.metadata["source_url"] == url -async def test_fetch_blueprint_from_unsupported_url(hass: HomeAssistant) -> None: - """Test fetching blueprint from an unsupported URL.""" - url = "https://example.com/unsupported.yaml" +async def test_fetch_blueprint_from_generic_url( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test fetching blueprint from url.""" + aioclient_mock.get( + "https://example.org/path/someblueprint.yaml", + text=Path( + hass.config.path("blueprints/automation/test_event_service.yaml") + ).read_text(encoding="utf8"), + ) - with pytest.raises(HomeAssistantError, match=r"^Unsupported URL$"): - await importer.fetch_blueprint_from_url(hass, url) + url = "https://example.org/path/someblueprint.yaml" + imported_blueprint = await importer.fetch_blueprint_from_url(hass, url) + assert isinstance(imported_blueprint, importer.ImportedBlueprint) + assert imported_blueprint.blueprint.domain == "automation" + assert imported_blueprint.suggested_filename == "example.org/someblueprint" + assert imported_blueprint.blueprint.metadata["source_url"] == url + + +def test_generic_importer_last() -> None: + """Test that generic importer is always the last one.""" + assert ( + importer.FETCH_FUNCTIONS.count(importer.fetch_blueprint_from_generic_url) == 1 + ) + assert importer.FETCH_FUNCTIONS[-1] == importer.fetch_blueprint_from_generic_url diff --git a/tests/components/bluesound/__init__.py b/tests/components/bluesound/__init__.py new file mode 100644 index 00000000000..f8a3701422e --- /dev/null +++ b/tests/components/bluesound/__init__.py @@ -0,0 +1 @@ +"""Tests for the Bluesound integration.""" diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py new file mode 100644 index 00000000000..02c73bcd62f --- /dev/null +++ b/tests/components/bluesound/conftest.py @@ -0,0 +1,103 @@ +"""Common fixtures for the Bluesound tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from pyblu import SyncStatus +import pytest + +from homeassistant.components.bluesound.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant + +from 
tests.common import MockConfigEntry + + +@pytest.fixture +def sync_status() -> SyncStatus: + """Return a sync status object.""" + return SyncStatus( + etag="etag", + id="1.1.1.1:11000", + mac="00:11:22:33:44:55", + name="player-name", + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bluesound.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return a mocked config entry.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.2", + CONF_PORT: 11000, + }, + unique_id="00:11:22:33:44:55-11000", + ) + mock_entry.add_to_hass(hass) + + return mock_entry + + +@pytest.fixture +def mock_player() -> Generator[AsyncMock]: + """Mock the player.""" + with ( + patch( + "homeassistant.components.bluesound.Player", autospec=True + ) as mock_player, + patch( + "homeassistant.components.bluesound.config_flow.Player", + new=mock_player, + ), + ): + player = mock_player.return_value + player.__aenter__.return_value = player + player.status.return_value = None + player.sync_status.return_value = SyncStatus( + etag="etag", + id="1.1.1.1:11000", + mac="00:11:22:33:44:55", + name="player-name", + image="invalid_url", + initialized=True, + brand="brand", + model="model", + model_name="model-name", + volume_db=0.5, + volume=50, + group=None, + master=None, + slaves=None, + zone=None, + zone_master=None, + zone_slave=None, + mute_volume_db=None, + mute_volume=None, + ) + yield player diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py new file mode 100644 index 00000000000..32f36fcea58 --- /dev/null +++ b/tests/components/bluesound/test_config_flow.py @@ -0,0 +1,247 @@ +"""Test the Bluesound config flow.""" + +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError + +from homeassistant.components.bluesound.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + + +async def test_user_flow_cannot_connect( + hass: HomeAssistant, 
mock_player: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + mock_player.sync_status.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_PORT: 11000, + } + + +async def test_user_flow_already_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we handle already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "1.1.1.1", + CONF_PORT: 11000, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test the import flow succeeds.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_cannot_connect( + hass: HomeAssistant, mock_player: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + mock_player.sync_status.assert_called_once() + + +async def test_import_flow_already_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we handle already configured.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={CONF_HOST: "1.1.1.1", CONF_PORT: 11000}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + mock_player.sync_status.assert_called_once() + + +async def test_zeroconf_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_player: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", +
name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm" + + mock_setup_entry.assert_not_called() + mock_player.sync_status.assert_called_once() + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "player-name" + assert result["data"] == {CONF_HOST: "1.1.1.1", CONF_PORT: 11000} + assert result["result"].unique_id == "00:11:22:33:44:55-11000" + + mock_setup_entry.assert_called_once() + + +async def test_zeroconf_flow_cannot_connect( + hass: HomeAssistant, mock_player: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + mock_player.sync_status.side_effect = ClientConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + mock_player.sync_status.assert_called_once() + + +async def test_zeroconf_flow_already_configured( + hass: HomeAssistant, + mock_player: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test we handle already configured and update the host.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZeroconfServiceInfo( + ip_address="1.1.1.1", + ip_addresses=["1.1.1.1"], + port=11000, + hostname="player-name", + type="_musc._tcp.local.", + name="player-name._musc._tcp.local.", + properties={}, + ), + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_config_entry.data[CONF_HOST] == "1.1.1.1" + + mock_player.sync_status.assert_called_once() diff --git a/tests/components/bluetooth/conftest.py b/tests/components/bluetooth/conftest.py index 4373ec3f915..93a1c59cba1 100644 --- a/tests/components/bluetooth/conftest.py +++ b/tests/components/bluetooth/conftest.py @@ -1,12 +1,12 @@ """Tests for the bluetooth component.""" +from collections.abc import Generator from unittest.mock import patch from bleak_retry_connector import bleak_manager from dbus_fast.aio import message_bus import habluetooth.util as habluetooth_utils import pytest -from typing_extensions import Generator @pytest.fixture(name="disable_bluez_manager_socket", autouse=True, scope="package") diff --git a/tests/components/bluetooth/test_manager.py b/tests/components/bluetooth/test_manager.py index 4bff7cbe94d..0ac49aa72cd 100644 --- a/tests/components/bluetooth/test_manager.py +++ b/tests/components/bluetooth/test_manager.py @@ -1,5 +1,6 @@ """Tests for the Bluetooth integration manager.""" +from collections.abc import Generator from datetime import timedelta import time from typing import Any @@ -11,7 +12,6 @@ from bluetooth_adapters import AdvertisementHistory # pylint: disable-next=no-name-in-module from habluetooth.advertisement_tracker import TRACKER_BUFFERING_WOBBLE_SECONDS import pytest -from typing_extensions import Generator from homeassistant.components import bluetooth from homeassistant.components.bluetooth import ( diff --git a/tests/components/bluetooth/test_passive_update_processor.py b/tests/components/bluetooth/test_passive_update_processor.py index 
8e1163c0bdb..079ac2200fc 100644 --- a/tests/components/bluetooth/test_passive_update_processor.py +++ b/tests/components/bluetooth/test_passive_update_processor.py @@ -1653,12 +1653,12 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( unregister_binary_sensor_processor() unregister_sensor_processor() - async with async_test_home_assistant() as hass: - await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + async with async_test_home_assistant() as test_hass: + await async_setup_component(test_hass, DOMAIN, {DOMAIN: {}}) current_entry.set(entry) coordinator = PassiveBluetoothProcessorCoordinator( - hass, + test_hass, _LOGGER, "aa:bb:cc:dd:ee:ff", BluetoothScanningMode.ACTIVE, @@ -1706,7 +1706,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( ] sensor_entity_one: PassiveBluetoothProcessorEntity = sensor_entities[0] - sensor_entity_one.hass = hass + sensor_entity_one.hass = test_hass assert sensor_entity_one.available is False # service data not injected assert sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-pressure" assert sensor_entity_one.device_info == { @@ -1723,7 +1723,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( binary_sensor_entity_one: PassiveBluetoothProcessorEntity = ( binary_sensor_entities[0] ) - binary_sensor_entity_one.hass = hass + binary_sensor_entity_one.hass = test_hass assert binary_sensor_entity_one.available is False # service data not injected assert binary_sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-motion" assert binary_sensor_entity_one.device_info == { @@ -1739,7 +1739,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart( cancel_coordinator() unregister_binary_sensor_processor() unregister_sensor_processor() - await hass.async_stop() + await test_hass.async_stop() NAMING_PASSIVE_BLUETOOTH_DATA_UPDATE = PassiveBluetoothDataUpdate( diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index 0c5645b3f71..5fc3d70c97a 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Iterator from contextlib import contextmanager from unittest.mock import patch @@ -27,7 +28,7 @@ from . 
import _get_manager, generate_advertisement_data, generate_ble_device @contextmanager -def mock_shutdown(manager: HomeAssistantBluetoothManager) -> None: +def mock_shutdown(manager: HomeAssistantBluetoothManager) -> Iterator[None]: """Mock shutdown of the HomeAssistantBluetoothManager.""" manager.shutdown = True yield diff --git a/tests/components/bmw_connected_drive/__init__.py b/tests/components/bmw_connected_drive/__init__.py index c11d5ef0021..655955ff9aa 100644 --- a/tests/components/bmw_connected_drive/__init__.py +++ b/tests/components/bmw_connected_drive/__init__.py @@ -1,6 +1,10 @@ """Tests for the for the BMW Connected Drive integration.""" -from bimmer_connected.const import REMOTE_SERVICE_BASE_URL, VEHICLE_CHARGING_BASE_URL +from bimmer_connected.const import ( + REMOTE_SERVICE_V4_BASE_URL, + VEHICLE_CHARGING_BASE_URL, + VEHICLE_POI_URL, +) import respx from homeassistant import config_entries @@ -67,10 +71,11 @@ def check_remote_service_call( first_remote_service_call: respx.models.Call = next( c for c in router.calls - if c.request.url.path.startswith(REMOTE_SERVICE_BASE_URL) + if c.request.url.path.startswith(REMOTE_SERVICE_V4_BASE_URL) or c.request.url.path.startswith( VEHICLE_CHARGING_BASE_URL.replace("/{vin}", "") ) + or c.request.url.path.endswith(VEHICLE_POI_URL.rsplit("/", maxsplit=1)[-1]) ) assert ( first_remote_service_call.request.url.path.endswith(remote_service) is True @@ -87,6 +92,10 @@ def check_remote_service_call( == remote_service_params ) + # Send POI doesn't return a status response, so we can't check it + if remote_service == "send-to-car": + return + # Now check final result last_event_status_call = next( c for c in reversed(router.calls) if c.request.url.path.endswith("eventStatus") diff --git a/tests/components/bmw_connected_drive/conftest.py b/tests/components/bmw_connected_drive/conftest.py index f69763dae77..7581b8c6f76 100644 --- a/tests/components/bmw_connected_drive/conftest.py +++ b/tests/components/bmw_connected_drive/conftest.py @@ -1,11 +1,12 @@ """Fixtures for BMW tests.""" +from collections.abc import Generator + from bimmer_connected.tests import ALL_CHARGING_SETTINGS, ALL_PROFILES, ALL_STATES from bimmer_connected.tests.common import MyBMWMockRouter from bimmer_connected.vehicle import remote_services import pytest import respx -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr index 610e194c0e5..c0462279e59 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_binary_sensor.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i3 (+ REX) Charging status', }), @@ -83,11 +82,8 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Check control messages', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_check_control_messages', @@ -133,17 +129,14 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 
'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2022-10-01', - 'car': 'i3 (+ REX)', 'device_class': 'problem', 'friendly_name': 'i3 (+ REX) Condition based services', 'vehicle_check': 'OK', 'vehicle_check_date': '2023-05-01', 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2023-05-01', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_condition_based_services', @@ -189,7 +182,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i3 (+ REX) Connection status', }), @@ -237,12 +229,9 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'lock', 'door_lock_state': 'UNLOCKED', 'friendly_name': 'i3 (+ REX) Door lock state', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_door_lock_state', @@ -288,8 +277,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Lids', 'hood': 'CLOSED', @@ -299,7 +286,6 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_lids', @@ -345,7 +331,6 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Pre entry climatization', }), 'context': , @@ -392,13 +377,10 @@ # name: test_entity_state_attrs[binary_sensor.i3_rex_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'device_class': 'opening', 'friendly_name': 'i3 (+ REX) Windows', 'leftFront': 'CLOSED', 'rightFront': 'CLOSED', - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'binary_sensor.i3_rex_windows', @@ -444,7 +426,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'i4 eDrive40 Charging status', }), @@ -492,12 +473,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_check_control_messages', @@ -543,11 +521,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'i4 eDrive40', 'device_class': 'problem', 'friendly_name': 'i4 eDrive40 Condition based services', 'tire_wear_front': 'OK', @@ -558,7 +534,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 
'binary_sensor.i4_edrive40_condition_based_services', @@ -604,7 +579,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'i4 eDrive40 Connection status', }), @@ -652,12 +626,9 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Door lock state', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_door_lock_state', @@ -703,8 +674,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Lids', 'hood': 'CLOSED', @@ -713,7 +682,6 @@ 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_lids', @@ -759,7 +727,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Pre entry climatization', }), 'context': , @@ -806,8 +773,6 @@ # name: test_entity_state_attrs[binary_sensor.i4_edrive40_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'device_class': 'opening', 'friendly_name': 'i4 eDrive40 Windows', 'leftFront': 'CLOSED', @@ -815,7 +780,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'binary_sensor.i4_edrive40_windows', @@ -861,7 +825,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery_charging', 'friendly_name': 'iX xDrive50 Charging status', }), @@ -909,12 +872,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_check_control_messages', @@ -960,11 +920,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'iX xDrive50', 'device_class': 'problem', 'friendly_name': 'iX xDrive50 Condition based services', 'tire_wear_front': 'OK', @@ -975,7 +933,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_condition_based_services', @@ -1021,7 +978,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_connection_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'plug', 'friendly_name': 'iX 
xDrive50 Connection status', }), @@ -1069,12 +1025,9 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Door lock state', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_door_lock_state', @@ -1120,8 +1073,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Lids', 'hood': 'CLOSED', @@ -1131,7 +1082,6 @@ 'rightRear': 'CLOSED', 'sunRoof': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_lids', @@ -1177,7 +1127,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_pre_entry_climatization-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Pre entry climatization', }), 'context': , @@ -1224,8 +1173,6 @@ # name: test_entity_state_attrs[binary_sensor.ix_xdrive50_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'device_class': 'opening', 'friendly_name': 'iX xDrive50 Windows', 'leftFront': 'CLOSED', @@ -1233,7 +1180,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'binary_sensor.ix_xdrive50_windows', @@ -1279,13 +1225,10 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_check_control_messages-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'problem', 'engine_oil': 'LOW', 'friendly_name': 'M340i xDrive Check control messages', 'tire_pressure': 'LOW', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_check_control_messages', @@ -1331,11 +1274,9 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_condition_based_services-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'brake_fluid': 'OK', 'brake_fluid_date': '2024-12-01', 'brake_fluid_distance': '50000 km', - 'car': 'M340i xDrive', 'device_class': 'problem', 'friendly_name': 'M340i xDrive Condition based services', 'oil': 'OK', @@ -1349,7 +1290,6 @@ 'vehicle_tuv': 'OK', 'vehicle_tuv_date': '2024-12-01', 'vehicle_tuv_distance': '50000 km', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_condition_based_services', @@ -1395,12 +1335,9 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_door_lock_state-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'lock', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Door lock state', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_door_lock_state', @@ -1446,8 +1383,6 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_lids-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Lids', 'hood': 'CLOSED', @@ -1456,7 +1391,6 @@ 'rightFront': 'CLOSED', 
'rightRear': 'CLOSED', 'trunk': 'CLOSED', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_lids', @@ -1502,8 +1436,6 @@ # name: test_entity_state_attrs[binary_sensor.m340i_xdrive_windows-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'device_class': 'opening', 'friendly_name': 'M340i xDrive Windows', 'leftFront': 'CLOSED', @@ -1511,7 +1443,6 @@ 'rear': 'CLOSED', 'rightFront': 'CLOSED', 'rightRear': 'CLOSED', - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'binary_sensor.m340i_xdrive_windows', diff --git a/tests/components/bmw_connected_drive/snapshots/test_button.ambr b/tests/components/bmw_connected_drive/snapshots/test_button.ambr index cd3f94c7e5e..f38441125ce 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_button.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_button.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[button.i3_rex_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Activate air conditioning', }), 'context': , @@ -82,7 +81,6 @@ # name: test_entity_state_attrs[button.i3_rex_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Find vehicle', }), 'context': , @@ -129,7 +127,6 @@ # name: test_entity_state_attrs[button.i3_rex_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Flash lights', }), 'context': , @@ -176,7 +173,6 @@ # name: test_entity_state_attrs[button.i3_rex_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Sound horn', }), 'context': , @@ -223,7 +219,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Activate air conditioning', }), 'context': , @@ -270,7 +265,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Deactivate air conditioning', }), 'context': , @@ -317,7 +311,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Find vehicle', }), 'context': , @@ -364,7 +357,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Flash lights', }), 'context': , @@ -411,7 +403,6 @@ # name: test_entity_state_attrs[button.i4_edrive40_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Sound horn', }), 'context': , @@ -458,7 +449,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Activate air conditioning', }), 'context': , @@ -505,7 +495,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_deactivate_air_conditioning-state] StateSnapshot({ 
'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Deactivate air conditioning', }), 'context': , @@ -552,7 +541,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Find vehicle', }), 'context': , @@ -599,7 +587,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Flash lights', }), 'context': , @@ -646,7 +633,6 @@ # name: test_entity_state_attrs[button.ix_xdrive50_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Sound horn', }), 'context': , @@ -693,7 +679,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_activate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Activate air conditioning', }), 'context': , @@ -740,7 +725,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_deactivate_air_conditioning-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Deactivate air conditioning', }), 'context': , @@ -787,7 +771,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_find_vehicle-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Find vehicle', }), 'context': , @@ -834,7 +817,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_flash_lights-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Flash lights', }), 'context': , @@ -881,7 +863,6 @@ # name: test_entity_state_attrs[button.m340i_xdrive_sound_horn-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Sound horn', }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr index 477cd24376d..81ef1220069 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_diagnostics.ambr @@ -232,16 +232,19 @@ }), 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -252,27 +255,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 
'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -287,11 +301,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -570,6 +618,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -660,6 +709,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1086,15 +1147,19 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -1105,37 +1170,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': 
True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -1408,6 +1516,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -1498,6 +1607,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -1840,16 +1961,20 @@ }), 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -1867,31 +1992,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 
'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -2027,6 +2194,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -2113,6 +2281,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -2942,226 +3122,6 @@ }), ]), 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 'isRemoteServicesActivationRequired': False, - 'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 
'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': '2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -3235,20 +3195,31 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 
'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -3259,27 +3230,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -3294,11 +3276,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -3476,6 +3492,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -3566,6 +3583,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -3685,15 +3714,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 
'isChargeNowForBusinessSupported': True, @@ -3704,37 +3737,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -3906,6 +3982,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -3996,6 +4073,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -4115,16 +4204,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -4142,31 +4235,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': 
False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -4300,6 +4435,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -4386,6 +4522,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -5343,226 +5491,6 @@ 'vin': '**REDACTED**', }), 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 'isRemoteServicesActivationRequired': False, - 'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 
'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': '2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 
'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -5636,20 +5564,31 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -5660,27 +5599,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -5695,11 +5645,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -5877,6 +5861,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -5967,6 +5952,18 @@ }), }), }), + 'vehicleSoftwareVersion': 
dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6086,15 +6083,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -6105,37 +6106,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6307,6 +6351,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6397,6 +6442,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -6516,16 
+6573,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -6543,31 +6604,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -6701,6 +6804,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -6787,6 +6891,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7098,226 +7214,6 @@ dict({ 'data': None, 'fingerprint': list([ - dict({ - 'content': dict({ - 'capabilities': dict({ - 'climateFunction': 'AIR_CONDITIONING', - 'climateNow': True, - 'climateTimerTrigger': 'DEPARTURE_TIMER', - 'horn': True, - 'isBmwChargingSupported': True, - 'isCarSharingSupported': False, - 'isChargeNowForBusinessSupported': False, - 'isChargingHistorySupported': True, - 'isChargingHospitalityEnabled': False, - 'isChargingLoudnessEnabled': False, - 
'isChargingPlanSupported': True, - 'isChargingPowerLimitEnabled': False, - 'isChargingSettingsEnabled': False, - 'isChargingTargetSocEnabled': False, - 'isClimateTimerSupported': True, - 'isCustomerEsimSupported': False, - 'isDCSContractManagementSupported': True, - 'isDataPrivacyEnabled': False, - 'isEasyChargeEnabled': False, - 'isEvGoChargingSupported': False, - 'isMiniChargingSupported': False, - 'isNonLscFeatureEnabled': False, - 'isRemoteEngineStartSupported': False, - 'isRemoteHistoryDeletionSupported': False, - 'isRemoteHistorySupported': True, - 'isRemoteParkingSupported': False, - 'isRemoteServicesActivationRequired': False, - 'isRemoteServicesBookingRequired': False, - 'isScanAndChargeSupported': False, - 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': False, - 'lastStateCallState': 'ACTIVATED', - 'lights': True, - 'lock': True, - 'remoteChargingCommands': dict({ - }), - 'sendPoi': True, - 'specialThemeSupport': list([ - ]), - 'unlock': True, - 'vehicleFinder': False, - 'vehicleStateSource': 'LAST_STATE_CALL', - }), - 'state': dict({ - 'chargingProfile': dict({ - 'chargingControlType': 'WEEKLY_PLANNER', - 'chargingMode': 'DELAYED_CHARGING', - 'chargingPreference': 'CHARGING_WINDOW', - 'chargingSettings': dict({ - 'hospitality': 'NO_ACTION', - 'idcc': 'NO_ACTION', - 'targetSoc': 100, - }), - 'climatisationOn': False, - 'departureTimes': list([ - dict({ - 'action': 'DEACTIVATE', - 'id': 1, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 35, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 2, - 'timeStamp': dict({ - 'hour': 18, - 'minute': 0, - }), - 'timerWeekDays': list([ - 'MONDAY', - 'TUESDAY', - 'WEDNESDAY', - 'THURSDAY', - 'FRIDAY', - 'SATURDAY', - 'SUNDAY', - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 3, - 'timeStamp': dict({ - 'hour': 7, - 'minute': 0, - }), - 'timerWeekDays': list([ - ]), - }), - dict({ - 'action': 'DEACTIVATE', - 'id': 4, - 'timerWeekDays': list([ - ]), - }), - ]), - 'reductionOfChargeCurrent': dict({ - 'end': dict({ - 'hour': 1, - 'minute': 30, - }), - 'start': dict({ - 'hour': 18, - 'minute': 1, - }), - }), - }), - 'checkControlMessages': list([ - ]), - 'climateTimers': list([ - dict({ - 'departureTime': dict({ - 'hour': 6, - 'minute': 40, - }), - 'isWeeklyTimer': True, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'THURSDAY', - 'SUNDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 12, - 'minute': 50, - }), - 'isWeeklyTimer': False, - 'timerAction': 'ACTIVATE', - 'timerWeekDays': list([ - 'MONDAY', - ]), - }), - dict({ - 'departureTime': dict({ - 'hour': 18, - 'minute': 59, - }), - 'isWeeklyTimer': True, - 'timerAction': 'DEACTIVATE', - 'timerWeekDays': list([ - 'WEDNESDAY', - ]), - }), - ]), - 'combustionFuelLevel': dict({ - 'range': 105, - 'remainingFuelLiters': 6, - }), - 'currentMileage': 137009, - 'doorsState': dict({ - 'combinedSecurityState': 'UNLOCKED', - 'combinedState': 'CLOSED', - 'hood': 'CLOSED', - 'leftFront': 'CLOSED', - 'leftRear': 'CLOSED', - 'rightFront': 'CLOSED', - 'rightRear': 'CLOSED', - 'trunk': 'CLOSED', - }), - 'driverPreferences': dict({ - 'lscPrivacyMode': 'OFF', - }), - 'electricChargingState': dict({ - 'chargingConnectionType': 'CONDUCTIVE', - 'chargingLevelPercent': 82, - 'chargingStatus': 'WAITING_FOR_CHARGING', - 'chargingTarget': 100, - 'isChargerConnected': True, - 'range': 174, - }), - 'isLeftSteering': True, - 'isLscSupported': True, - 'lastFetched': 
'2022-06-22T14:24:23.982Z', - 'lastUpdatedAt': '2022-06-22T13:58:52Z', - 'range': 174, - 'requiredServices': list([ - dict({ - 'dateTime': '2022-10-01T00:00:00.000Z', - 'description': 'Next service due by the specified date.', - 'status': 'OK', - 'type': 'BRAKE_FLUID', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next vehicle check due after the specified distance or date.', - 'status': 'OK', - 'type': 'VEHICLE_CHECK', - }), - dict({ - 'dateTime': '2023-05-01T00:00:00.000Z', - 'description': 'Next state inspection due by the specified date.', - 'status': 'OK', - 'type': 'VEHICLE_TUV', - }), - ]), - 'roofState': dict({ - 'roofState': 'CLOSED', - 'roofStateType': 'SUN_ROOF', - }), - 'windowsState': dict({ - 'combinedState': 'CLOSED', - 'leftFront': 'CLOSED', - 'rightFront': 'CLOSED', - }), - }), - }), - 'filename': 'bmw-eadrax-vcs_v4_vehicles_state_WBY0FINGERPRINT04.json', - }), dict({ 'content': dict({ 'chargeAndClimateSettings': dict({ @@ -7391,20 +7287,31 @@ }), 'filename': 'mini-eadrax-vcs_v5_vehicle-list.json', }), + dict({ + 'content': dict({ + 'gcid': 'ceb64158-d2ca-47e9-9ee6-cbffb881434e', + 'mappingInfos': list([ + ]), + }), + 'filename': 'toyota-eadrax-vcs_v5_vehicle-list.json', + }), dict({ 'content': dict({ 'capabilities': dict({ 'a4aType': 'BLUETOOTH', - 'checkSustainabilityDPP': False, + 'alarmSystem': True, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_2_UWB', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': True, }), 'horn': True, 'inCarCamera': True, + 'inCarCameraDwa': True, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7415,27 +7322,38 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, 'isCustomerEsimSupported': True, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ @@ -7450,11 +7368,45 @@ 'NOT_SUPPORTED', ]), }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + 'state': 'ACTIVATED', + }), + 'inCarCameraDwa': 
dict({ + 'id': 'inCarCameraDwa', + 'state': 'ACTIVATED', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + 'state': 'ACTIVATED', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, 'surroundViewRecorder': True, 'unlock': True, @@ -7632,6 +7584,7 @@ 'roofState': 'CLOSED', 'roofStateType': 'SUN_ROOF', }), + 'securityOverviewMode': 'ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -7722,6 +7675,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -7841,15 +7806,19 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'AIR_CONDITIONING', 'climateNow': True, 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': True, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': True, @@ -7860,37 +7829,80 @@ 'isChargingPowerLimitEnabled': True, 'isChargingSettingsEnabled': True, 'isChargingTargetSocEnabled': True, + 'isClimateTimerSupported': False, 'isClimateTimerWeeklyActive': False, - 'isCustomerEsimSupported': True, + 'isCustomerEsimSupported': False, 'isDCSContractManagementSupported': True, 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': True, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': True, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, + 'isThirdPartyAppStoreSupported': False, 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 
'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8062,6 +8074,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': 'NOT_ARMED', 'tireState': dict({ 'frontLeft': dict({ 'details': dict({ @@ -8152,6 +8165,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', @@ -8271,16 +8296,20 @@ 'content': dict({ 'capabilities': dict({ 'a4aType': 'NOT_SUPPORTED', - 'checkSustainabilityDPP': False, + 'alarmSystem': False, 'climateFunction': 'VENTILATION', 'climateNow': True, 'climateTimerTrigger': 'DEPARTURE_TIMER', 'digitalKey': dict({ 'bookedServicePackage': 'SMACC_1_5', + 'isDigitalKeyFirstSupported': False, 'readerGraphics': 'readerGraphics', 'state': 'ACTIVATED', + 'vehicleSoftwareUpgradeRequired': False, }), 'horn': True, + 'inCarCamera': False, + 'inCarCameraDwa': False, 'isBmwChargingSupported': False, 'isCarSharingSupported': False, 'isChargeNowForBusinessSupported': False, @@ -8298,31 +8327,73 @@ 'isDataPrivacyEnabled': False, 'isEasyChargeEnabled': False, 'isEvGoChargingSupported': False, + 'isLocationBasedChargingSettingsSupported': False, 'isMiniChargingSupported': False, 'isNonLscFeatureEnabled': False, 'isPersonalPictureUploadSupported': False, - 'isRemoteEngineStartSupported': False, + 'isPlugAndChargeSupported': False, + 'isRemoteEngineStartEnabled': False, + 'isRemoteEngineStartSupported': True, 'isRemoteHistoryDeletionSupported': False, 'isRemoteHistorySupported': True, + 'isRemoteParkingEes25Active': False, 'isRemoteParkingSupported': False, 'isRemoteServicesActivationRequired': False, 'isRemoteServicesBookingRequired': False, 'isScanAndChargeSupported': False, 'isSustainabilityAccumulatedViewEnabled': False, 'isSustainabilitySupported': False, - 'isWifiHotspotServiceSupported': True, + 'isThirdPartyAppStoreSupported': False, + 'isWifiHotspotServiceSupported': False, 'lastStateCallState': 'ACTIVATED', 'lights': True, + 'locationBasedCommerceFeatures': dict({ + 'fueling': False, + 'parking': False, + 'reservations': False, + }), 'lock': True, 'remote360': True, 'remoteChargingCommands': dict({ }), + 'remoteServices': dict({ + 'doorLock': dict({ + 'id': 'doorLock', + 'state': 'ACTIVATED', + }), + 'doorUnlock': dict({ + 'id': 'doorUnlock', + 'state': 'ACTIVATED', + }), + 'hornBlow': dict({ + 'id': 'hornBlow', + 'state': 'ACTIVATED', + }), + 'inCarCamera': dict({ + 'id': 'inCarCamera', + }), + 'inCarCameraDwa': dict({ + 'id': 'inCarCameraDwa', + }), + 'lightFlash': dict({ + 'id': 'lightFlash', + 'state': 'ACTIVATED', + }), + 'remote360': dict({ + 'id': 'remote360', + 'state': 'ACTIVATED', + }), + 'surroundViewRecorder': dict({ + 'id': 'surroundViewRecorder', + }), + }), 'remoteSoftwareUpgrade': True, 'sendPoi': True, 'specialThemeSupport': list([ ]), - 'speechThirdPartyAlexa': False, + 'speechThirdPartyAlexa': True, 'speechThirdPartyAlexaSDK': False, + 'surroundViewRecorder': False, 'unlock': True, 'vehicleFinder': True, 'vehicleStateSource': 'LAST_STATE_CALL', @@ -8456,6 +8527,7 @@ 'type': 'TIRE_WEAR_FRONT', }), ]), + 'securityOverviewMode': None, 'tireState': dict({ 'frontLeft': 
dict({ 'details': dict({ @@ -8542,6 +8614,18 @@ }), }), }), + 'vehicleSoftwareVersion': dict({ + 'iStep': dict({ + 'iStep': 0, + 'month': 0, + 'seriesCluster': '', + 'year': 0, + }), + 'puStep': dict({ + 'month': 0, + 'year': 0, + }), + }), 'windowsState': dict({ 'combinedState': 'CLOSED', 'leftFront': 'CLOSED', diff --git a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr index 17e6b118011..395c6e56dda 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_lock.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_lock.ambr @@ -35,12 +35,9 @@ # name: test_entity_state_attrs[lock.i3_rex_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i3 (+ REX)', 'door_lock_state': 'UNLOCKED', 'friendly_name': 'i3 (+ REX) Lock', 'supported_features': , - 'vin': 'WBY00000000REXI01', }), 'context': , 'entity_id': 'lock.i3_rex_lock', @@ -86,12 +83,9 @@ # name: test_entity_state_attrs[lock.i4_edrive40_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'i4 eDrive40', 'door_lock_state': 'LOCKED', 'friendly_name': 'i4 eDrive40 Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO02', }), 'context': , 'entity_id': 'lock.i4_edrive40_lock', @@ -137,12 +131,9 @@ # name: test_entity_state_attrs[lock.ix_xdrive50_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'iX xDrive50', 'door_lock_state': 'LOCKED', 'friendly_name': 'iX xDrive50 Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO01', }), 'context': , 'entity_id': 'lock.ix_xdrive50_lock', @@ -188,12 +179,9 @@ # name: test_entity_state_attrs[lock.m340i_xdrive_lock-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'car': 'M340i xDrive', 'door_lock_state': 'LOCKED', 'friendly_name': 'M340i xDrive Lock', 'supported_features': , - 'vin': 'WBA00000000DEMO03', }), 'context': , 'entity_id': 'lock.m340i_xdrive_lock', diff --git a/tests/components/bmw_connected_drive/snapshots/test_number.ambr b/tests/components/bmw_connected_drive/snapshots/test_number.ambr index f24ea43d8e8..71dbc46b454 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_number.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_number.ambr @@ -40,7 +40,6 @@ # name: test_entity_state_attrs[number.i4_edrive40_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Target SoC', 'max': 100.0, @@ -97,7 +96,6 @@ # name: test_entity_state_attrs[number.ix_xdrive50_target_soc-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Target SoC', 'max': 100.0, diff --git a/tests/components/bmw_connected_drive/snapshots/test_select.ambr b/tests/components/bmw_connected_drive/snapshots/test_select.ambr index 34a8817c8db..b827dfe478a 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_select.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_select.ambr @@ -8,6 +8,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -40,11 +41,11 @@ # name: test_entity_state_attrs[select.i3_rex_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 
'friendly_name': 'i3 (+ REX) Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , @@ -107,7 +108,6 @@ # name: test_entity_state_attrs[select.i4_edrive40_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 AC Charging Limit', 'options': list([ '6', @@ -143,6 +143,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -175,11 +176,11 @@ # name: test_entity_state_attrs[select.i4_edrive40_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , @@ -242,7 +243,6 @@ # name: test_entity_state_attrs[select.ix_xdrive50_ac_charging_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 AC Charging Limit', 'options': list([ '6', @@ -278,6 +278,7 @@ 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'config_entry_id': , @@ -310,11 +311,11 @@ # name: test_entity_state_attrs[select.ix_xdrive50_charging_mode-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging Mode', 'options': list([ 'immediate_charging', 'delayed_charging', + 'no_action', ]), }), 'context': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 6ba87c029ee..8a26acd1040 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -31,14 +31,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBY00000000REXI01-ac_current_limit', + 'unique_id': 'WBY00000000REXI01-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i3 (+ REX) AC current limit', 'unit_of_measurement': , @@ -80,14 +79,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBY00000000REXI01-charging_end_time', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging end time', }), @@ -128,14 +126,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBY00000000REXI01-charging_start_time', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i3 (+ REX) Charging start time', }), @@ -191,14 +188,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'charging_status', - 'unique_id': 'WBY00000000REXI01-charging_status', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i3 (+ REX) Charging status', 'options': list([ @@ -256,14 +252,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBY00000000REXI01-charging_target', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Charging target', 'unit_of_measurement': '%', @@ -317,7 +312,6 @@ # name: test_entity_state_attrs[sensor.i3_rex_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Mileage', 'state_class': , @@ -365,14 +359,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBY00000000REXI01-remaining_battery_percent', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i3 (+ REX) Remaining battery percent', 'state_class': , @@ -413,22 +406,21 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBY00000000REXI01-remaining_fuel', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'i3 (+ REX) Remaining fuel', 'state_class': , 'unit_of_measurement': , @@ -475,14 +467,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBY00000000REXI01-remaining_fuel_percent', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i3 (+ REX) Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -529,14 +520,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBY00000000REXI01-remaining_range_electric', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by 
MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range electric', 'state_class': , @@ -584,14 +574,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBY00000000REXI01-remaining_range_fuel', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range fuel', 'state_class': , @@ -639,14 +628,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBY00000000REXI01-remaining_range_total', + 'unique_id': 'WBY00000000REXI01-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i3_rex_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i3 (+ REX) Remaining range total', 'state_class': , @@ -692,14 +680,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO02-ac_current_limit', + 'unique_id': 'WBA00000000DEMO02-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'i4 eDrive40 AC current limit', 'unit_of_measurement': , @@ -741,14 +728,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO02-charging_end_time', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging end time', }), @@ -789,14 +775,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO02-charging_start_time', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'i4 eDrive40 Charging start time', }), @@ -852,14 +837,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO02-charging_status', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Charging status', 'options': list([ @@ -917,14 +901,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO02-charging_target', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.charging_target', 
'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Charging target', 'unit_of_measurement': '%', @@ -973,14 +956,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO02-activity', + 'unique_id': 'WBA00000000DEMO02-climate.activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'i4 eDrive40 Climate status', 'options': list([ @@ -998,6 +980,234 @@ 'state': 'heating', }) # --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 
'friendly_name': 'i4 eDrive40 Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.55', + }) +# --- # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1039,7 +1249,6 @@ # name: test_entity_state_attrs[sensor.i4_edrive40_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 
'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Mileage', 'state_class': , @@ -1053,6 +1262,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.03', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.24', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.03', + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO02-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.i4_edrive40_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'i4 eDrive40 Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.i4_edrive40_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.31', + }) +# --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1087,14 +1524,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO02-remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'i4 eDrive40 Remaining battery percent', 'state_class': , @@ -1142,14 +1578,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO02-remaining_range_electric', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: 
test_entity_state_attrs[sensor.i4_edrive40_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range electric', 'state_class': , @@ -1197,14 +1632,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO02-remaining_range_total', + 'unique_id': 'WBA00000000DEMO02-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.i4_edrive40_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'i4 eDrive40 Remaining range total', 'state_class': , @@ -1250,14 +1684,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'ac_current_limit', - 'unique_id': 'WBA00000000DEMO01-ac_current_limit', + 'unique_id': 'WBA00000000DEMO01-charging_profile.ac_current_limit', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_ac_current_limit-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'current', 'friendly_name': 'iX xDrive50 AC current limit', 'unit_of_measurement': , @@ -1299,14 +1732,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_end_time', - 'unique_id': 'WBA00000000DEMO01-charging_end_time', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_end_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_end_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging end time', }), @@ -1347,14 +1779,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_start_time', - 'unique_id': 'WBA00000000DEMO01-charging_start_time', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_start_time', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_start_time-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'timestamp', 'friendly_name': 'iX xDrive50 Charging start time', }), @@ -1410,14 +1841,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_status', - 'unique_id': 'WBA00000000DEMO01-charging_status', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_status', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Charging status', 'options': list([ @@ -1475,14 +1905,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'charging_target', - 'unique_id': 'WBA00000000DEMO01-charging_target', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.charging_target', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_charging_target-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Charging target', 'unit_of_measurement': '%', @@ -1531,14 +1960,13 @@ 'previous_unique_id': None, 
'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO01-activity', + 'unique_id': 'WBA00000000DEMO01-climate.activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'iX xDrive50 Climate status', 'options': list([ @@ -1556,6 +1984,234 @@ 'state': 'inactive', }) # --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-entry] 
+ EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1597,7 +2253,6 @@ # name: test_entity_state_attrs[sensor.ix_xdrive50_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Mileage', 'state_class': , @@ -1611,6 +2266,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.61', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO01-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.ix_xdrive50_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'iX xDrive50 Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ix_xdrive50_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.69', + }) +# --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1645,14 +2528,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_battery_percent', - 'unique_id': 'WBA00000000DEMO01-remaining_battery_percent', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_battery_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_battery_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'battery', 'friendly_name': 'iX xDrive50 Remaining battery percent', 'state_class': , @@ -1700,14 +2582,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_electric', - 'unique_id': 'WBA00000000DEMO01-remaining_range_electric', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_electric', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_electric-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range electric', 'state_class': , @@ -1755,14 +2636,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 
'WBA00000000DEMO01-remaining_range_total', + 'unique_id': 'WBA00000000DEMO01-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.ix_xdrive50_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'iX xDrive50 Remaining range total', 'state_class': , @@ -1812,14 +2692,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'climate_status', - 'unique_id': 'WBA00000000DEMO03-activity', + 'unique_id': 'WBA00000000DEMO03-climate.activity', 'unit_of_measurement': None, }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_climate_status-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'enum', 'friendly_name': 'M340i xDrive Climate status', 'options': list([ @@ -1837,6 +2716,234 @@ 'state': 'inactive', }) # --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_left_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: 
test_entity_state_attrs[sensor.m340i_xdrive_front_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.41', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_front_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'front_right_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.front_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_front_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Front right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_front_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.55', + }) +# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ 
-1878,7 +2985,6 @@ # name: test_entity_state_attrs[sensor.m340i_xdrive_mileage-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Mileage', 'state_class': , @@ -1892,6 +2998,234 @@ 'state': '1121', }) # --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_left.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear left target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_left_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear left tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_left_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_left.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_left_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear left tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_left_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.24', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right target pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_target_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_right.target_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_target_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear right target pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_right_target_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear right tire pressure', + 'platform': 'bmw_connected_drive', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'rear_right_current_pressure', + 'unique_id': 'WBA00000000DEMO03-tires.rear_right.current_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_state_attrs[sensor.m340i_xdrive_rear_right_tire_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'M340i xDrive Rear right tire pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.m340i_xdrive_rear_right_tire_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.31', + }) +# --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1919,22 +3253,21 @@ 'suggested_display_precision': 0, }), }), - 'original_device_class': , + 'original_device_class': , 'original_icon': None, 'original_name': 'Remaining fuel', 'platform': 'bmw_connected_drive', 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel', - 'unique_id': 'WBA00000000DEMO03-remaining_fuel', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', - 'device_class': 'volume', + 'device_class': 'volume_storage', 'friendly_name': 'M340i xDrive Remaining fuel', 
'state_class': , 'unit_of_measurement': , @@ -1981,14 +3314,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_fuel_percent', - 'unique_id': 'WBA00000000DEMO03-remaining_fuel_percent', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_fuel_percent', 'unit_of_measurement': '%', }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_fuel_percent-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Remaining fuel percent', 'state_class': , 'unit_of_measurement': '%', @@ -2035,14 +3367,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_fuel', - 'unique_id': 'WBA00000000DEMO03-remaining_range_fuel', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_fuel', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_fuel-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Remaining range fuel', 'state_class': , @@ -2090,14 +3421,13 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'remaining_range_total', - 'unique_id': 'WBA00000000DEMO03-remaining_range_total', + 'unique_id': 'WBA00000000DEMO03-fuel_and_battery.remaining_range_total', 'unit_of_measurement': , }) # --- # name: test_entity_state_attrs[sensor.m340i_xdrive_remaining_range_total-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'device_class': 'distance', 'friendly_name': 'M340i xDrive Remaining range total', 'state_class': , diff --git a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr index 5a87a6ddd84..5b60a32c3be 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_switch.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_switch.ambr @@ -35,7 +35,6 @@ # name: test_entity_state_attrs[switch.i4_edrive40_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'i4 eDrive40 Climate', }), 'context': , @@ -82,7 +81,6 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_charging-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Charging', }), 'context': , @@ -129,7 +127,6 @@ # name: test_entity_state_attrs[switch.ix_xdrive50_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'iX xDrive50 Climate', }), 'context': , @@ -176,7 +173,6 @@ # name: test_entity_state_attrs[switch.m340i_xdrive_climate-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by MyBMW', 'friendly_name': 'M340i xDrive Climate', }), 'context': , diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index 3c7f452a011..f346cd70b26 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -159,7 +159,7 @@ async def test_options_flow_implementation(hass: HomeAssistant) -> None: CONF_READ_ONLY: True, } - assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 2 async def test_reauth(hass: HomeAssistant) -> None: @@ -210,4 +210,4 @@ async def 
test_reauth(hass: HomeAssistant) -> None: assert result2["reason"] == "reauth_successful" assert config_entry.data == FIXTURE_COMPLETE_ENTRY - assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 2 diff --git a/tests/components/bmw_connected_drive/test_coordinator.py b/tests/components/bmw_connected_drive/test_coordinator.py index 5b3f99a9414..b0f507bbfc2 100644 --- a/tests/components/bmw_connected_drive/test_coordinator.py +++ b/tests/components/bmw_connected_drive/test_coordinator.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.bmw_connected_drive import DOMAIN as BMW_DOMAIN -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.update_coordinator import UpdateFailed @@ -27,10 +27,7 @@ async def test_update_success(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - hass.data[config_entry.domain][config_entry.entry_id].last_update_success - is True - ) + assert config_entry.runtime_data.coordinator.last_update_success is True @pytest.mark.usefixtures("bmw_fixture") @@ -45,7 +42,7 @@ async def test_update_failed( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[config_entry.domain][config_entry.entry_id] + coordinator = config_entry.runtime_data.coordinator assert coordinator.last_update_success is True @@ -74,7 +71,7 @@ async def test_update_reauth( await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[config_entry.domain][config_entry.entry_id] + coordinator = config_entry.runtime_data.coordinator assert coordinator.last_update_success is True @@ -121,6 +118,7 @@ async def test_init_reauth( await hass.async_block_till_done() reauth_issue = issue_registry.async_get_issue( - HA_DOMAIN, f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}" + HOMEASSISTANT_DOMAIN, + f"config_entry_reauth_{BMW_DOMAIN}_{config_entry.entry_id}", ) assert reauth_issue.active is True diff --git a/tests/components/bmw_connected_drive/test_init.py b/tests/components/bmw_connected_drive/test_init.py index 5cd6362d6fa..e523b2b3d02 100644 --- a/tests/components/bmw_connected_drive/test_init.py +++ b/tests/components/bmw_connected_drive/test_init.py @@ -85,7 +85,7 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-remaining_battery_percent", + f"{VIN}-fuel_and_battery.remaining_battery_percent", ), ( { @@ -96,7 +96,18 @@ async def test_migrate_options_from_data(hass: HomeAssistant) -> None: "disabled_by": None, }, f"{VIN}-remaining_range_total", - f"{VIN}-remaining_range_total", + f"{VIN}-fuel_and_battery.remaining_range_total", + ), + ( + { + "domain": SENSOR_DOMAIN, + "platform": BMW_DOMAIN, + "unique_id": f"{VIN}-mileage", + "suggested_object_id": f"{VEHICLE_NAME} mileage", + "disabled_by": None, + }, + f"{VIN}-mileage", + f"{VIN}-mileage", ), ], ) @@ -143,7 +154,7 @@ async def test_migrate_unique_ids( "disabled_by": None, }, f"{VIN}-charging_level_hv", - f"{VIN}-remaining_battery_percent", + f"{VIN}-fuel_and_battery.remaining_battery_percent", ), ], ) @@ -163,8 +174,8 @@ async 
def test_dont_migrate_unique_ids( existing_entity = entity_registry.async_get_or_create( SENSOR_DOMAIN, BMW_DOMAIN, - unique_id=f"{VIN}-remaining_battery_percent", - suggested_object_id=f"{VEHICLE_NAME} remaining_battery_percent", + unique_id=f"{VIN}-fuel_and_battery.remaining_battery_percent", + suggested_object_id=f"{VEHICLE_NAME} fuel_and_battery.remaining_battery_percent", config_entry=mock_config_entry, ) diff --git a/tests/components/bmw_connected_drive/test_notify.py b/tests/components/bmw_connected_drive/test_notify.py new file mode 100644 index 00000000000..4113f618be0 --- /dev/null +++ b/tests/components/bmw_connected_drive/test_notify.py @@ -0,0 +1,151 @@ +"""Test BMW numbers.""" + +from unittest.mock import AsyncMock + +from bimmer_connected.models import MyBMWAPIError, MyBMWRemoteServiceError +from bimmer_connected.tests.common import POI_DATA +from bimmer_connected.vehicle.remote_services import RemoteServices +import pytest +import respx + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . import check_remote_service_call, setup_mocked_integration + + +async def test_legacy_notify_service_simple( + hass: HomeAssistant, + bmw_fixture: respx.Router, +) -> None: + """Test successful sending of POIs.""" + + # Setup component + assert await setup_mocked_integration(hass) + + # Minimal required data + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + }, + }, + blocking=True, + ) + check_remote_service_call(bmw_fixture, "send-to-car") + + bmw_fixture.reset() + + # Full data + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + "street": POI_DATA.get("street"), + "city": POI_DATA.get("city"), + "postal_code": POI_DATA.get("postal_code"), + "country": POI_DATA.get("country"), + }, + }, + blocking=True, + ) + check_remote_service_call(bmw_fixture, "send-to-car") + + +@pytest.mark.usefixtures("bmw_fixture") +@pytest.mark.parametrize( + ("data", "exc_translation"), + [ + ( + { + "latitude": POI_DATA.get("lat"), + }, + "Invalid data for point of interest: required key not provided @ data['longitude']", + ), + ( + { + "latitude": POI_DATA.get("lat"), + "longitude": "text", + }, + "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + ), + ( + { + "latitude": POI_DATA.get("lat"), + "longitude": 9999, + }, + "Invalid data for point of interest: invalid longitude for dictionary value @ data['longitude']", + ), + ], +) +async def test_service_call_invalid_input( + hass: HomeAssistant, + data: dict, + exc_translation: str, +) -> None: + """Test invalid inputs.""" + + # Setup component + assert await setup_mocked_integration(hass) + + with pytest.raises(ServiceValidationError) as exc: + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": data, + }, + blocking=True, + ) + assert str(exc.value) == exc_translation + + +@pytest.mark.usefixtures("bmw_fixture") +@pytest.mark.parametrize( + ("raised", "expected"), + [ + (MyBMWRemoteServiceError, HomeAssistantError), + (MyBMWAPIError, HomeAssistantError), + ], +) +async def test_service_call_fail( + hass: HomeAssistant, + raised: Exception, + 
expected: Exception, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test exception handling.""" + + # Setup component + assert await setup_mocked_integration(hass) + + # Setup exception + monkeypatch.setattr( + RemoteServices, + "trigger_remote_service", + AsyncMock(side_effect=raised), + ) + + # Test + with pytest.raises(expected): + await hass.services.async_call( + "notify", + "bmw_connected_drive_ix_xdrive50", + { + "message": POI_DATA.get("name"), + "data": { + "latitude": POI_DATA.get("lat"), + "longitude": POI_DATA.get("lon"), + }, + }, + blocking=True, + ) diff --git a/tests/components/braviatv/conftest.py b/tests/components/braviatv/conftest.py index 186f4e12337..b25e8ddf067 100644 --- a/tests/components/braviatv/conftest.py +++ b/tests/components/braviatv/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Bravia TV.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/braviatv/test_diagnostics.py b/tests/components/braviatv/test_diagnostics.py index 13f6c92fb76..a7bd1631788 100644 --- a/tests/components/braviatv/test_diagnostics.py +++ b/tests/components/braviatv/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.braviatv.const import CONF_USE_PSK, DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PIN @@ -71,4 +72,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 25330c10ba4..6c39c5020f9 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the Bring! 
tests.""" +from collections.abc import Generator from typing import cast from unittest.mock import AsyncMock, patch from bring_api.types import BringAuthResponse import pytest -from typing_extensions import Generator from homeassistant.components.bring import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index c9245fb16fa..61ef27815fd 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -89,6 +89,16 @@ BROADLINK_DEVICES = { 57, 5, ), + "Guest room": ( + "192.168.0.66", + "34ea34b61d2e", + "HY02/HY03", + "Hysen", + "HYS", + 0x4EAD, + 10024, + 5, + ), } @@ -168,6 +178,31 @@ class BroadlinkDevice: } +class BroadlinkMP1BG1Device(BroadlinkDevice): + """Mock device for MP1 and BG1 with special mocking of api return values.""" + + def get_mock_api(self): + """Return a mock device (API) with support for check_power calls.""" + mock_api = super().get_mock_api() + mock_api.check_power.return_value = {"s1": 0, "s2": 0, "s3": 0, "s4": 0} + return mock_api + + +class BroadlinkSP4BDevice(BroadlinkDevice): + """Mock device for SP4b with special mocking of api return values.""" + + def get_mock_api(self): + """Return a mock device (API) with support for get_state calls.""" + mock_api = super().get_mock_api() + mock_api.get_state.return_value = {"pwr": 0} + return mock_api + + def get_device(name): """Get a device by name.""" + dev_type = BROADLINK_DEVICES[name][5] + if dev_type in {0x4EB5}: + return BroadlinkMP1BG1Device(name, *BROADLINK_DEVICES[name]) + if dev_type in {0x5115}: + return BroadlinkSP4BDevice(name, *BROADLINK_DEVICES[name]) return BroadlinkDevice(name, *BROADLINK_DEVICES[name]) diff --git a/tests/components/broadlink/test_climate.py b/tests/components/broadlink/test_climate.py new file mode 100644 index 00000000000..6b39d1895b1 --- /dev/null +++ b/tests/components/broadlink/test_climate.py @@ -0,0 +1,180 @@ +"""Tests for Broadlink climate.""" + +from typing import Any + +import pytest + +from homeassistant.components.broadlink.climate import SensorMode +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.climate import ( + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + HVACAction, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import get_device + + +@pytest.mark.parametrize( + ( + "api_return_value", + "expected_state", + "expected_current_temperature", + "expected_temperature", + "expected_hvac_action", + ), + [ + ( + { + "sensor": SensorMode.INNER_SENSOR_CONTROL.value, + "power": 1, + "auto_mode": 0, + "active": 1, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.HEAT, + 22, + 23, + HVACAction.HEATING, + ), + ( + { + "sensor": SensorMode.OUTER_SENSOR_CONTROL.value, + "power": 1, + "auto_mode": 1, + "active": 0, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.AUTO, + 30, + 23, + HVACAction.IDLE, + ), + ( + { + "sensor": SensorMode.INNER_SENSOR_CONTROL.value, + "power": 0, + "auto_mode": 0, + "active": 0, + "room_temp": 22, + "thermostat_temp": 23, + "external_temp": 30, + }, + HVACMode.OFF, + 22, + 23, + HVACAction.OFF, + ), + ], +) +async def test_climate( + api_return_value: dict[str, Any], + expected_state: HVACMode, + expected_current_temperature: int, + expected_temperature: int, + expected_hvac_action: HVACAction, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink climate.""" + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] + assert len(climates) == 1 + + climate = climates[0] + + mock_setup.api.get_full_status.return_value = api_return_value + + await async_update_entity(hass, climate.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(climate.entity_id) + assert state.state == expected_state + assert state.attributes["current_temperature"] == expected_current_temperature + assert state.attributes["temperature"] == expected_temperature + assert state.attributes["hvac_action"] == expected_hvac_action + + +async def test_climate_set_temperature_turn_off_turn_on( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink climate.""" + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + climates = [entry for entry in entries if entry.domain == Platform.CLIMATE] + assert len(climates) == 1 + + climate = climates[0] + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: climate.entity_id, + ATTR_TEMPERATURE: "24", + }, + blocking=True, + ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 0 + assert mock_setup.api.set_mode.call_count == 0 + assert state.attributes["temperature"] == 24 + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: climate.entity_id, + }, + blocking=True, + ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 1 + assert mock_setup.api.set_mode.call_count == 0 + assert state.state == HVACMode.OFF + + await hass.services.async_call( + CLIMATE_DOMAIN, + 
SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: climate.entity_id, + }, + blocking=True, + ) + state = hass.states.get(climate.entity_id) + + assert mock_setup.api.set_temp.call_count == 1 + assert mock_setup.api.set_power.call_count == 2 + assert mock_setup.api.set_mode.call_count == 1 + assert state.state == HVACMode.HEAT diff --git a/tests/components/broadlink/test_select.py b/tests/components/broadlink/test_select.py new file mode 100644 index 00000000000..42715c9a5ab --- /dev/null +++ b/tests/components/broadlink/test_select.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink select.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.select import ( + ATTR_OPTION, + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . import get_device + + +async def test_select( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink select.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + selects = [entry for entry in entries if entry.domain == Platform.SELECT] + assert len(selects) == 1 + + select = selects[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, select.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(select.entity_id) + assert state.state == "wednesday" + + # set value + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: select.entity_id, + ATTR_OPTION: "tuesday", + }, + blocking=True, + ) + state = hass.states.get(select.entity_id) + assert state.state == "tuesday" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 2, + "minute": 3, + "second": 4, + "day": 2, + } diff --git a/tests/components/broadlink/test_time.py b/tests/components/broadlink/test_time.py new file mode 100644 index 00000000000..819954158bb --- /dev/null +++ b/tests/components/broadlink/test_time.py @@ -0,0 +1,67 @@ +"""Tests for Broadlink time.""" + +from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity + +from . 
import get_device + + +async def test_time( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test Broadlink time.""" + await hass.config.async_set_time_zone("UTC") + + device = get_device("Guest room") + mock_setup = await device.setup_entry(hass) + + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_setup.entry.unique_id)} + ) + entries = er.async_entries_for_device(entity_registry, device_entry.id) + times = [entry for entry in entries if entry.domain == Platform.TIME] + assert len(times) == 1 + + time = times[0] + + mock_setup.api.get_full_status.return_value = { + "dayofweek": 3, + "hour": 2, + "min": 3, + "sec": 4, + } + await async_update_entity(hass, time.entity_id) + assert mock_setup.api.get_full_status.call_count == 2 + state = hass.states.get(time.entity_id) + assert state.state == "02:03:04+00:00" + + # set value + await hass.services.async_call( + TIME_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: time.entity_id, + ATTR_TIME: "03:04:05", + }, + blocking=True, + ) + state = hass.states.get(time.entity_id) + assert state.state == "03:04:05" + assert mock_setup.api.set_time.call_count == 1 + call_args = mock_setup.api.set_time.call_args.kwargs + assert call_args == { + "hour": 3, + "minute": 4, + "second": 5, + "day": 3, + } diff --git a/tests/components/brother/conftest.py b/tests/components/brother/conftest.py index 5fadca5314d..de22158da00 100644 --- a/tests/components/brother/conftest.py +++ b/tests/components/brother/conftest.py @@ -1,11 +1,11 @@ """Test fixtures for brother.""" +from collections.abc import Generator from datetime import UTC, datetime -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from brother import BrotherSensors import pytest -from typing_extensions import Generator from homeassistant.components.brother.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TYPE @@ -87,7 +87,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_unload_entry() -> Generator[AsyncMock, None, None]: +def mock_unload_entry() -> Generator[AsyncMock]: """Override async_unload_entry.""" with patch( "homeassistant.components.brother.async_unload_entry", return_value=True @@ -96,7 +96,7 @@ def mock_unload_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_brother_client() -> Generator[AsyncMock, None, None]: +def mock_brother_client() -> Generator[MagicMock]: """Mock Brother client.""" with ( patch("homeassistant.components.brother.Brother", autospec=True) as mock_client, diff --git a/tests/components/brottsplatskartan/conftest.py b/tests/components/brottsplatskartan/conftest.py index c10093f18b9..1d0cf236ed9 100644 --- a/tests/components/brottsplatskartan/conftest.py +++ b/tests/components/brottsplatskartan/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for Brottplatskartan.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/brunt/conftest.py b/tests/components/brunt/conftest.py index bfbca238446..1b60db682c3 100644 --- a/tests/components/brunt/conftest.py +++ b/tests/components/brunt/conftest.py @@ -1,9 +1,9 @@ """Configuration for brunt tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git 
a/tests/components/bryant_evolution/__init__.py b/tests/components/bryant_evolution/__init__.py new file mode 100644 index 00000000000..22fa2950253 --- /dev/null +++ b/tests/components/bryant_evolution/__init__.py @@ -0,0 +1 @@ +"""Tests for the Bryant Evolution integration.""" diff --git a/tests/components/bryant_evolution/conftest.py b/tests/components/bryant_evolution/conftest.py new file mode 100644 index 00000000000..cc9dfbec1e1 --- /dev/null +++ b/tests/components/bryant_evolution/conftest.py @@ -0,0 +1,70 @@ +"""Common fixtures for the Bryant Evolution tests.""" + +from collections.abc import Generator, Mapping +from unittest.mock import AsyncMock, patch + +from evolutionhttp import BryantEvolutionLocalClient +import pytest + +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.bryant_evolution.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +DEFAULT_SYSTEM_ZONES = ((1, 1), (1, 2), (2, 3)) +""" +A tuple of (system, zone) pairs representing the default system and zone configurations +for the Bryant Evolution integration. +""" + + +@pytest.fixture(autouse=True) +def mock_evolution_client_factory() -> Generator[AsyncMock, None, None]: + """Mock an Evolution client.""" + with patch( + "evolutionhttp.BryantEvolutionLocalClient.get_client", + austospec=True, + ) as mock_get_client: + clients: Mapping[tuple[int, int], AsyncMock] = {} + for system, zone in DEFAULT_SYSTEM_ZONES: + clients[(system, zone)] = AsyncMock(spec=BryantEvolutionLocalClient) + client = clients[system, zone] + client.read_zone_name.return_value = f"System {system} Zone {zone}" + client.read_current_temperature.return_value = 75 + client.read_hvac_mode.return_value = ("COOL", False) + client.read_fan_mode.return_value = "AUTO" + client.read_cooling_setpoint.return_value = 72 + mock_get_client.side_effect = lambda system, zone, tty: clients[ + (system, zone) + ] + yield mock_get_client + + +@pytest.fixture +async def mock_evolution_entry( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, +) -> MockConfigEntry: + """Configure and return a Bryant evolution integration.""" + hass.config.units = US_CUSTOMARY_SYSTEM + entry = MockConfigEntry( + entry_id="01J3XJZSTEF6G5V0QJX6HBC94T", # For determinism in snapshot tests + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry diff --git a/tests/components/bryant_evolution/snapshots/test_climate.ambr b/tests/components/bryant_evolution/snapshots/test_climate.ambr new file mode 100644 index 00000000000..4f6c1f2bbc4 --- /dev/null +++ b/tests/components/bryant_evolution/snapshots/test_climate.ambr @@ -0,0 +1,83 @@ +# serializer version: 1 +# name: test_setup_integration_success[climate.system_1_zone_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'auto', + 'low', + 'med', + 'high', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 95, + 'min_temp': 45, + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.system_1_zone_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bryant_evolution', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J3XJZSTEF6G5V0QJX6HBC94T-S1-Z1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_integration_success[climate.system_1_zone_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 75, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'auto', + 'low', + 'med', + 'high', + ]), + 'friendly_name': 'System 1 Zone 1', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 95, + 'min_temp': 45, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': 72, + }), + 'context': , + 'entity_id': 'climate.system_1_zone_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/bryant_evolution/test_climate.py b/tests/components/bryant_evolution/test_climate.py new file mode 100644 index 00000000000..42944c32bc2 --- /dev/null +++ b/tests/components/bryant_evolution/test_climate.py @@ -0,0 +1,259 @@ +"""Test the BryantEvolutionClient type.""" + +from collections.abc import Generator +from datetime import timedelta +import logging +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.bryant_evolution.climate import SCAN_INTERVAL +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_ACTION, + ATTR_HVAC_MODE, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACAction, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +_LOGGER = logging.getLogger(__name__) + + +async def trigger_polling(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> None: + """Trigger a polling event.""" + freezer.tick(SCAN_INTERVAL + timedelta(seconds=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + +async def test_setup_integration_success( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_evolution_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test that an instance can be constructed.""" + await snapshot_platform( + hass, entity_registry, snapshot, mock_evolution_entry.entry_id + ) + + +async def test_set_temperature_mode_cool( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + freezer: FrozenDateTimeFactory, +) -> None: + """Test setting the temperature in cool mode.""" + # Start with known initial conditions + client = await mock_evolution_client_factory(1, 1, "/dev/unused") + client.read_hvac_mode.return_value = ("COOL", False) + client.read_cooling_setpoint.return_value = 75 + await trigger_polling(hass, freezer) + state = hass.states.get("climate.system_1_zone_1") + 
assert state.attributes["temperature"] == 75, state.attributes + + # Make the call, modifting the mock client to throw an exception on + # read to ensure that the update is visible iff we call + # async_update_ha_state. + data = {ATTR_TEMPERATURE: 70} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + client.read_cooling_setpoint.side_effect = Exception("fake failure") + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True + ) + + # Verify effect. + client.set_cooling_setpoint.assert_called_once_with(70) + state = hass.states.get("climate.system_1_zone_1") + assert state.attributes["temperature"] == 70 + + +async def test_set_temperature_mode_heat( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + freezer: FrozenDateTimeFactory, +) -> None: + """Test setting the temperature in heat mode.""" + + # Start with known initial conditions + client = await mock_evolution_client_factory(1, 1, "/dev/unused") + client.read_hvac_mode.return_value = ("HEAT", False) + client.read_heating_setpoint.return_value = 60 + await trigger_polling(hass, freezer) + + # Make the call, modifting the mock client to throw an exception on + # read to ensure that the update is visible iff we call + # async_update_ha_state. + data = {"temperature": 65} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + client.read_heating_setpoint.side_effect = Exception("fake failure") + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True + ) + # Verify effect. + state = hass.states.get("climate.system_1_zone_1") + assert state.attributes["temperature"] == 65, state.attributes + + +async def test_set_temperature_mode_heat_cool( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + freezer: FrozenDateTimeFactory, +) -> None: + """Test setting the temperature in heat_cool mode.""" + + # Enter heat_cool with known setpoints + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + mock_client.read_hvac_mode.return_value = ("AUTO", False) + mock_client.read_cooling_setpoint.return_value = 90 + mock_client.read_heating_setpoint.return_value = 40 + await trigger_polling(hass, freezer) + state = hass.states.get("climate.system_1_zone_1") + assert state.state == "heat_cool" + assert state.attributes["target_temp_low"] == 40 + assert state.attributes["target_temp_high"] == 90 + + # Make the call, modifting the mock client to throw an exception on + # read to ensure that the update is visible iff we call + # async_update_ha_state. 
+ mock_client.read_heating_setpoint.side_effect = Exception("fake failure") + mock_client.read_cooling_setpoint.side_effect = Exception("fake failure") + data = {"target_temp_low": 70, "target_temp_high": 80} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, data, blocking=True + ) + state = hass.states.get("climate.system_1_zone_1") + assert state.attributes["target_temp_low"] == 70, state.attributes + assert state.attributes["target_temp_high"] == 80, state.attributes + mock_client.set_cooling_setpoint.assert_called_once_with(80) + mock_client.set_heating_setpoint.assert_called_once_with(70) + + +async def test_set_fan_mode( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], +) -> None: + """Test that setting fan mode works.""" + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + fan_modes = ["auto", "low", "med", "high"] + for mode in fan_modes: + # Make the call, modifting the mock client to throw an exception on + # read to ensure that the update is visible iff we call + # async_update_ha_state. + mock_client.read_fan_mode.side_effect = Exception("fake failure") + data = {ATTR_FAN_MODE: mode} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, data, blocking=True + ) + assert ( + hass.states.get("climate.system_1_zone_1").attributes[ATTR_FAN_MODE] == mode + ) + mock_client.set_fan_mode.assert_called_with(mode) + + +@pytest.mark.parametrize( + ("hvac_mode", "evolution_mode"), + [("heat_cool", "auto"), ("heat", "heat"), ("cool", "cool"), ("off", "off")], +) +async def test_set_hvac_mode( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + hvac_mode, + evolution_mode, +) -> None: + """Test that setting HVAC mode works.""" + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + + # Make the call, modifting the mock client to throw an exception on + # read to ensure that the update is visible iff we call + # async_update_ha_state. 
+ data = {ATTR_HVAC_MODE: hvac_mode} + data[ATTR_ENTITY_ID] = "climate.system_1_zone_1" + mock_client.read_hvac_mode.side_effect = Exception("fake failure") + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, data, blocking=True + ) + await hass.async_block_till_done() + assert hass.states.get("climate.system_1_zone_1").state == evolution_mode + mock_client.set_hvac_mode.assert_called_with(evolution_mode) + + +@pytest.mark.parametrize( + ("curr_temp", "expected_action"), + [(62, HVACAction.HEATING), (70, HVACAction.OFF), (80, HVACAction.COOLING)], +) +async def test_read_hvac_action_heat_cool( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + freezer: FrozenDateTimeFactory, + curr_temp: int, + expected_action: HVACAction, +) -> None: + """Test that we can read the current HVAC action in heat_cool mode.""" + htsp = 68 + clsp = 72 + + mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + mock_client.read_heating_setpoint.return_value = htsp + mock_client.read_cooling_setpoint.return_value = clsp + is_active = curr_temp < htsp or curr_temp > clsp + mock_client.read_hvac_mode.return_value = ("auto", is_active) + mock_client.read_current_temperature.return_value = curr_temp + await trigger_polling(hass, freezer) + state = hass.states.get("climate.system_1_zone_1") + assert state.attributes[ATTR_HVAC_ACTION] == expected_action + + +@pytest.mark.parametrize( + ("mode", "active", "expected_action"), + [ + ("heat", True, "heating"), + ("heat", False, "off"), + ("cool", True, "cooling"), + ("cool", False, "off"), + ("off", False, "off"), + ], +) +async def test_read_hvac_action( + hass: HomeAssistant, + mock_evolution_entry: MockConfigEntry, + mock_evolution_client_factory: Generator[AsyncMock, None, None], + freezer: FrozenDateTimeFactory, + mode: str, + active: bool, + expected_action: str, +) -> None: + """Test that we can read the current HVAC action.""" + # Initial state should be no action. + assert ( + hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] + == HVACAction.OFF + ) + # Perturb the system and verify we see an action. 
+ mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") + mock_client.read_heating_setpoint.return_value = 75 # Needed if mode == heat + mock_client.read_hvac_mode.return_value = (mode, active) + await trigger_polling(hass, freezer) + assert ( + hass.states.get("climate.system_1_zone_1").attributes[ATTR_HVAC_ACTION] + == expected_action + ) diff --git a/tests/components/bryant_evolution/test_config_flow.py b/tests/components/bryant_evolution/test_config_flow.py new file mode 100644 index 00000000000..39d203201eb --- /dev/null +++ b/tests/components/bryant_evolution/test_config_flow.py @@ -0,0 +1,170 @@ +"""Test the Bryant Evolution config flow.""" + +from unittest.mock import DEFAULT, AsyncMock, patch + +from evolutionhttp import BryantEvolutionLocalClient, ZoneInfo + +from homeassistant import config_entries +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], + 2: [ZoneInfo(2, 3, "S2Z2"), ZoneInfo(2, 4, "S2Z3")], + }.get(system_id, []) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_form_success", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY, result + assert result["title"] == "SAM at test_form_success" + assert result["data"] == { + CONF_FILENAME: "test_form_success", + CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.return_value = [] + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_form_cannot_connect", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1"), ZoneInfo(1, 2, "S1Z2")], + 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4")], + }.get(system_id, []) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "some-serial", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SAM at some-serial" + assert result["data"] == { + CONF_FILENAME: "some-serial", + 
CONF_SYSTEM_ZONE: [(1, 1), (1, 2), (2, 3), (2, 4)], + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect_bad_file( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_evolution_client_factory: AsyncMock, +) -> None: + """Test we handle cannot connect error from a missing file.""" + mock_evolution_client_factory.side_effect = FileNotFoundError("test error") + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + # This file does not exist. + CONF_FILENAME: "test_form_cannot_connect_bad_file", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + +async def test_reconfigure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_evolution_entry: MockConfigEntry, +) -> None: + """Test that reconfigure discovers additional systems and zones.""" + + # Reconfigure with additional systems and zones. + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": mock_evolution_entry.entry_id, + }, + ) + with ( + patch.object( + BryantEvolutionLocalClient, + "enumerate_zones", + return_value=DEFAULT, + ) as mock_call, + ): + mock_call.side_effect = lambda system_id, filename: { + 1: [ZoneInfo(1, 1, "S1Z1")], + 2: [ZoneInfo(2, 3, "S2Z3"), ZoneInfo(2, 4, "S2Z4"), ZoneInfo(2, 5, "S2Z5")], + }.get(system_id, []) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_FILENAME: "test_reconfigure", + }, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.ABORT, result + assert result["reason"] == "reconfigured" + config_entry = hass.config_entries.async_entries()[0] + assert config_entry.data[CONF_SYSTEM_ZONE] == [ + (1, 1), + (2, 3), + (2, 4), + (2, 5), + ] diff --git a/tests/components/bryant_evolution/test_init.py b/tests/components/bryant_evolution/test_init.py new file mode 100644 index 00000000000..72734f7e117 --- /dev/null +++ b/tests/components/bryant_evolution/test_init.py @@ -0,0 +1,112 @@ +"""Test setup for the bryant_evolution integration.""" + +import logging +from unittest.mock import AsyncMock + +from evolutionhttp import BryantEvolutionLocalClient +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.bryant_evolution.const import CONF_SYSTEM_ZONE, DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_FILENAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM + +from .conftest import DEFAULT_SYSTEM_ZONES +from .test_climate import trigger_polling + +from tests.common import MockConfigEntry + +_LOGGER = logging.getLogger(__name__) + + +async def test_setup_integration_prevented_by_unavailable_client( + hass: HomeAssistant, mock_evolution_client_factory: AsyncMock +) -> None: + """Test that setup throws ConfigEntryNotReady when the client is unavailable.""" + mock_evolution_client_factory.side_effect = FileNotFoundError("test error") + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_FILENAME: "test_setup_integration_prevented_by_unavailable_client", + CONF_SYSTEM_ZONE: [(1, 1)], + }, + ) + mock_evolution_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + assert mock_evolution_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_integration_client_returns_none( + hass: HomeAssistant, mock_evolution_client_factory: AsyncMock +) -> None: + """Test that an unavailable client causes ConfigEntryNotReady.""" + mock_client = AsyncMock(spec=BryantEvolutionLocalClient) + mock_evolution_client_factory.side_effect = None + mock_evolution_client_factory.return_value = mock_client + mock_client.read_fan_mode.return_value = None + mock_client.read_current_temperature.return_value = None + mock_client.read_hvac_mode.return_value = None + mock_client.read_cooling_setpoint.return_value = None + mock_client.read_zone_name.return_value = None + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: [(1, 1)]}, + ) + mock_evolution_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + assert mock_evolution_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_setup_multiple_systems_zones( + hass: HomeAssistant, + mock_evolution_client_factory: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that a device with multiple systems and zones works.""" + hass.config.units = US_CUSTOMARY_SYSTEM + mock_evolution_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_FILENAME: "/dev/ttyUSB0", CONF_SYSTEM_ZONE: DEFAULT_SYSTEM_ZONES}, + ) + mock_evolution_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_evolution_entry.entry_id) + await hass.async_block_till_done() + + # Set the temperature of each zone to its zone number so that we can + # ensure we've created the right client for each zone. + for sz, client in mock_evolution_entry.runtime_data.items(): + client.read_current_temperature.return_value = sz[1] + await trigger_polling(hass, freezer) + + # Check that each system and zone has the expected temperature value to + # verify that the initial setup flow worked as expected. + for sz in DEFAULT_SYSTEM_ZONES: + system = sz[0] + zone = sz[1] + state = hass.states.get(f"climate.system_{system}_zone_{zone}") + assert state, hass.states.async_all() + assert state.attributes["current_temperature"] == zone + + # Check that the created devices are wired to each other as expected. 
+ device_registry = dr.async_get(hass) + + def find_device(name): + return next(filter(lambda x: x.name == name, device_registry.devices.values())) + + sam = find_device("System Access Module") + s1 = find_device("System 1") + s2 = find_device("System 2") + s1z1 = find_device("System 1 Zone 1") + s1z2 = find_device("System 1 Zone 2") + s2z3 = find_device("System 2 Zone 3") + + assert sam.via_device_id is None + assert s1.via_device_id == sam.id + assert s2.via_device_id == sam.id + assert s1z1.via_device_id == s1.id + assert s1z2.via_device_id == s1.id + assert s2z3.via_device_id == s2.id diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 224e0e0b157..862f3ae1d0c 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -1,10 +1,10 @@ """Fixtures for BSBLAN integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from bsblan import Device, Info, State import pytest -from typing_extensions import Generator from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME diff --git a/tests/components/buienradar/conftest.py b/tests/components/buienradar/conftest.py index 7c9027c7715..7872b50d4a9 100644 --- a/tests/components/buienradar/conftest.py +++ b/tests/components/buienradar/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for buienradar2.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/button/test_device_trigger.py b/tests/components/button/test_device_trigger.py index dee8045a71f..f5ade86e1a0 100644 --- a/tests/components/button/test_device_trigger.py +++ b/tests/components/button/test_device_trigger.py @@ -13,17 +13,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -109,7 +99,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -158,9 +148,9 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) @@ -169,7 +159,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = 
MockConfigEntry(domain="test", data={}) @@ -218,8 +208,8 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "2021-01-01T23:59:59+00:00") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - unknown - 2021-01-01T23:59:59+00:00 - None - 0" ) diff --git a/tests/components/button/test_init.py b/tests/components/button/test_init.py index 583c625e1b2..7df5308e096 100644 --- a/tests/components/button/test_init.py +++ b/tests/components/button/test_init.py @@ -1,11 +1,11 @@ """The tests for the Button component.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.button import ( DOMAIN, diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index 7c47ea14607..0079e59a931 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -1,11 +1,11 @@ """Test the CalDAV config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from caldav.lib.error import AuthorizationError, DAVError import pytest import requests -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.caldav.const import DOMAIN diff --git a/tests/components/caldav/test_todo.py b/tests/components/caldav/test_todo.py index 66f6e975453..69a49e0fcbe 100644 --- a/tests/components/caldav/test_todo.py +++ b/tests/components/caldav/test_todo.py @@ -8,8 +8,17 @@ from caldav.lib.error import DAVError, NotFoundError from caldav.objects import Todo import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -226,12 +235,12 @@ async def test_supported_components( RESULT_ITEM, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, {"status": "NEEDS-ACTION", "summary": "Cheese", "due": date(2023, 11, 18)}, {**RESULT_ITEM, "due": "2023-11-18"}, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -240,7 +249,7 @@ async def test_supported_components( {**RESULT_ITEM, "due": "2023-11-18T08:30:00-06:00"}, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, { "status": "NEEDS-ACTION", "summary": "Cheese", @@ -278,9 +287,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Cheese", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -306,9 +315,9 @@ async def test_add_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": 
"Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -317,7 +326,7 @@ async def test_add_item_failure( ("update_data", "expected_ics", "expected_state", "expected_item"), [ ( - {"rename": "Swiss Cheese"}, + {ATTR_RENAME: "Swiss Cheese"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -334,7 +343,7 @@ async def test_add_item_failure( }, ), ( - {"status": "needs_action"}, + {ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -351,7 +360,7 @@ async def test_add_item_failure( }, ), ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -368,7 +377,7 @@ async def test_add_item_failure( }, ), ( - {"rename": "Swiss Cheese", "status": "needs_action"}, + {ATTR_RENAME: "Swiss Cheese", ATTR_STATUS: "needs_action"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20171126", @@ -385,7 +394,7 @@ async def test_add_item_failure( }, ), ( - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ "DESCRIPTION:Any kind will do", "DUE;VALUE=DATE:20231118", @@ -402,7 +411,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": "2023-11-18T08:30:00-06:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T08:30:00-06:00"}, [ "DESCRIPTION:Any kind will do", "DUE;TZID=America/Regina:20231118T083000", @@ -419,7 +428,7 @@ async def test_add_item_failure( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, [ "DESCRIPTION:Any kind will do", "STATUS:NEEDS-ACTION", @@ -434,7 +443,7 @@ async def test_add_item_failure( }, ), ( - {"description": "Make sure to get Swiss"}, + {ATTR_DESCRIPTION: "Make sure to get Swiss"}, [ "DESCRIPTION:Make sure to get Swiss", "DUE;VALUE=DATE:20171126", @@ -451,7 +460,7 @@ async def test_add_item_failure( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, ["DUE;VALUE=DATE:20171126", "STATUS:NEEDS-ACTION", "SUMMARY:Cheese"], "1", { @@ -501,12 +510,12 @@ async def test_update_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", + ATTR_ITEM: "Cheese", **update_data, }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -520,9 +529,9 @@ async def test_update_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, return_response=True, ) @@ -548,12 +557,12 @@ async def test_update_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV save error"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -582,12 +591,12 @@ async def test_update_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "status": "completed", + ATTR_ITEM: "Cheese", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -635,9 +644,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": uids_to_delete}, - target={"entity_id": 
TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids_to_delete}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -668,9 +677,9 @@ async def test_remove_item_lookup_failure( with pytest.raises(HomeAssistantError, match=match): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -697,9 +706,9 @@ async def test_remove_item_failure( with pytest.raises(HomeAssistantError, match="CalDAV delete error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -725,9 +734,9 @@ async def test_remove_item_not_found( with pytest.raises(HomeAssistantError, match="Could not find"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": "Cheese"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "Cheese"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -779,12 +788,12 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "Cheese", - "rename": "Milk", + ATTR_ITEM: "Cheese", + ATTR_RENAME: "Milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/calendar/conftest.py b/tests/components/calendar/conftest.py index 83ecaca97d3..3e18f595764 100644 --- a/tests/components/calendar/conftest.py +++ b/tests/components/calendar/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for calendar sensor platforms.""" +from collections.abc import Generator import datetime import secrets from typing import Any from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components.calendar import DOMAIN, CalendarEntity, CalendarEvent from homeassistant.config_entries import ConfigEntry, ConfigFlow diff --git a/tests/components/calendar/test_init.py b/tests/components/calendar/test_init.py index 116ca70f15e..4ad5e11b8e4 100644 --- a/tests/components/calendar/test_init.py +++ b/tests/components/calendar/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from typing import Any @@ -9,7 +10,6 @@ from typing import Any from freezegun import freeze_time import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator import voluptuous as vol from homeassistant.components.calendar import DOMAIN, SERVICE_GET_EVENTS @@ -23,7 +23,7 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture(name="frozen_time") -def mock_frozen_time() -> None: +def mock_frozen_time() -> str | None: """Fixture to set a frozen time used in tests. This is needed so that it can run before other fixtures. 
@@ -32,7 +32,7 @@ def mock_frozen_time() -> None: @pytest.fixture(autouse=True) -def mock_set_frozen_time(frozen_time: Any) -> Generator[None]: +def mock_set_frozen_time(frozen_time: str | None) -> Generator[None]: """Fixture to freeze time that also can work for other fixtures.""" if not frozen_time: yield @@ -44,9 +44,9 @@ def mock_set_frozen_time(frozen_time: Any) -> Generator[None]: @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - set_time_zone: Any, - frozen_time: Any, - mock_setup_integration: Any, + set_time_zone: None, + frozen_time: str | None, + mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test and fixtures are set up in the right order.""" diff --git a/tests/components/calendar/test_recorder.py b/tests/components/calendar/test_recorder.py index aeddebc226c..c7511b8b2b0 100644 --- a/tests/components/calendar/test_recorder.py +++ b/tests/components/calendar/test_recorder.py @@ -1,7 +1,6 @@ """The tests for calendar recorder.""" from datetime import timedelta -from typing import Any import pytest @@ -19,7 +18,7 @@ from tests.components.recorder.common import async_wait_recording_done async def mock_setup_dependencies( recorder_mock: Recorder, hass: HomeAssistant, - set_time_zone: Any, + set_time_zone: None, mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: diff --git a/tests/components/calendar/test_trigger.py b/tests/components/calendar/test_trigger.py index 3b415d46e63..dfe4622e82e 100644 --- a/tests/components/calendar/test_trigger.py +++ b/tests/components/calendar/test_trigger.py @@ -9,7 +9,7 @@ forward exercising the triggers. from __future__ import annotations -from collections.abc import AsyncIterator, Callable +from collections.abc import AsyncIterator, Callable, Generator from contextlib import asynccontextmanager import datetime import logging @@ -19,7 +19,6 @@ import zoneinfo from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components import automation, calendar from homeassistant.components.calendar.trigger import EVENT_END, EVENT_START @@ -85,9 +84,7 @@ class FakeSchedule: @pytest.fixture -def fake_schedule( - hass: HomeAssistant, freezer: FrozenDateTimeFactory -) -> Generator[FakeSchedule]: +def fake_schedule(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> FakeSchedule: """Fixture that tests can use to make fake events.""" # Setup start time for all tests @@ -105,7 +102,7 @@ def mock_test_entity(test_entities: list[MockCalendarEntity]) -> MockCalendarEnt @pytest.fixture(name="setup_platform", autouse=True) async def mock_setup_platform( hass: HomeAssistant, - mock_setup_integration: Any, + mock_setup_integration: None, config_entry: MockConfigEntry, ) -> None: """Fixture to setup platforms used in the test.""" diff --git a/tests/components/camera/conftest.py b/tests/components/camera/conftest.py index 524b56c2303..ea3d65f4864 100644 --- a/tests/components/camera/conftest.py +++ b/tests/components/camera/conftest.py @@ -1,9 +1,9 @@ """Test helpers for camera.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import PropertyMock, patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components import camera from homeassistant.components.camera.const import StreamType diff --git a/tests/components/camera/test_init.py b/tests/components/camera/test_init.py index 
7da6cd91a7a..098c321e63b 100644 --- a/tests/components/camera/test_init.py +++ b/tests/components/camera/test_init.py @@ -1,12 +1,12 @@ """The tests for the camera component.""" +from collections.abc import Generator from http import HTTPStatus import io from types import ModuleType from unittest.mock import AsyncMock, Mock, PropertyMock, mock_open, patch import pytest -from typing_extensions import Generator from homeassistant.components import camera from homeassistant.components.camera.const import ( diff --git a/tests/components/canary/conftest.py b/tests/components/canary/conftest.py index 583986fd483..07a3ce89495 100644 --- a/tests/components/canary/conftest.py +++ b/tests/components/canary/conftest.py @@ -1,10 +1,10 @@ """Define fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from canary.api import Api import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 2c0c36d6632..7dce3f768e2 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -148,6 +148,7 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] + return None @pytest.mark.parametrize( diff --git a/tests/components/ccm15/conftest.py b/tests/components/ccm15/conftest.py index d6cc66d77dc..e393b2679b6 100644 --- a/tests/components/ccm15/conftest.py +++ b/tests/components/ccm15/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Midea ccm15 AC Controller tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from ccm15 import CCM15DeviceState, CCM15SlaveDevice import pytest -from typing_extensions import Generator @pytest.fixture @@ -17,7 +17,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def ccm15_device() -> Generator[AsyncMock]: +def ccm15_device() -> Generator[None]: """Mock ccm15 device.""" ccm15_devices = { 0: CCM15SlaveDevice(bytes.fromhex("000000b0b8001b")), @@ -32,7 +32,7 @@ def ccm15_device() -> Generator[AsyncMock]: @pytest.fixture -def network_failure_ccm15_device() -> Generator[AsyncMock]: +def network_failure_ccm15_device() -> Generator[None]: """Mock empty set of ccm15 device.""" device_state = CCM15DeviceState(devices={}) with patch( diff --git a/tests/components/ccm15/test_climate.py b/tests/components/ccm15/test_climate.py index 329caafd11c..785cb17c6a9 100644 --- a/tests/components/ccm15/test_climate.py +++ b/tests/components/ccm15/test_climate.py @@ -1,10 +1,11 @@ """Unit test for CCM15 coordinator component.""" from datetime import timedelta -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from ccm15 import CCM15DeviceState from freezegun.api import FrozenDateTimeFactory +import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -27,11 +28,11 @@ from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, async_fire_time_changed +@pytest.mark.usefixtures("ccm15_device") async def test_climate_state( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - ccm15_device: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test the coordinator.""" diff --git a/tests/components/ccm15/test_diagnostics.py 
b/tests/components/ccm15/test_diagnostics.py index a433591d86e..f6f0d75c4e3 100644 --- a/tests/components/ccm15/test_diagnostics.py +++ b/tests/components/ccm15/test_diagnostics.py @@ -1,7 +1,6 @@ """Test CCM15 diagnostics.""" -from unittest.mock import AsyncMock - +import pytest from syrupy import SnapshotAssertion from homeassistant.components.ccm15.const import DOMAIN @@ -13,10 +12,10 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.usefixtures("ccm15_device") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - ccm15_device: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/ccm15/test_init.py b/tests/components/ccm15/test_init.py index 3069b61f10f..0fb75920ad3 100644 --- a/tests/components/ccm15/test_init.py +++ b/tests/components/ccm15/test_init.py @@ -1,6 +1,6 @@ """Tests for the ccm15 component.""" -from unittest.mock import AsyncMock +import pytest from homeassistant.components.ccm15.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -10,7 +10,8 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry -async def test_load_unload(hass: HomeAssistant, ccm15_device: AsyncMock) -> None: +@pytest.mark.usefixtures("ccm15_device") +async def test_load_unload(hass: HomeAssistant) -> None: """Test options flow.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/cert_expiry/conftest.py b/tests/components/cert_expiry/conftest.py index 2a86c669970..4932e9e1869 100644 --- a/tests/components/cert_expiry/conftest.py +++ b/tests/components/cert_expiry/conftest.py @@ -1,9 +1,9 @@ """Configuration for cert_expiry tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/chacon_dio/__init__.py b/tests/components/chacon_dio/__init__.py new file mode 100644 index 00000000000..2a340097eb2 --- /dev/null +++ b/tests/components/chacon_dio/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Chacon Dio integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/chacon_dio/conftest.py b/tests/components/chacon_dio/conftest.py new file mode 100644 index 00000000000..3c3b970cec0 --- /dev/null +++ b/tests/components/chacon_dio/conftest.py @@ -0,0 +1,71 @@ +"""Common fixtures for the chacon_dio tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.chacon_dio.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + +MOCK_COVER_DEVICE = { + "L4HActuator_idmock1": { + "id": "L4HActuator_idmock1", + "name": "Shutter mock 1", + "type": "SHUTTER", + "model": "CERSwd-3B_1.0.6", + "connected": True, + "openlevel": 75, + "movement": "stop", + } +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + 
"homeassistant.components.chacon_dio.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="test_entry_unique_id", + data={ + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + +@pytest.fixture +def mock_dio_chacon_client() -> Generator[AsyncMock]: + """Mock a Dio Chacon client.""" + + with ( + patch( + "homeassistant.components.chacon_dio.DIOChaconAPIClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.chacon_dio.config_flow.DIOChaconAPIClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.get_user_id.return_value = "dummy-user-id" + client.search_all_devices.return_value = MOCK_COVER_DEVICE + + client.move_shutter_direction.return_value = {} + client.disconnect.return_value = {} + + yield client diff --git a/tests/components/chacon_dio/snapshots/test_cover.ambr b/tests/components/chacon_dio/snapshots/test_cover.ambr new file mode 100644 index 00000000000..b2febe20070 --- /dev/null +++ b/tests/components/chacon_dio/snapshots/test_cover.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_entities[cover.shutter_mock_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.shutter_mock_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'chacon_dio', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'L4HActuator_idmock1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[cover.shutter_mock_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 75, + 'device_class': 'shutter', + 'friendly_name': 'Shutter mock 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.shutter_mock_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/chacon_dio/test_config_flow.py b/tests/components/chacon_dio/test_config_flow.py new file mode 100644 index 00000000000..d72b5a7dec3 --- /dev/null +++ b/tests/components/chacon_dio/test_config_flow.py @@ -0,0 +1,122 @@ +"""Test the chacon_dio config flow.""" + +from unittest.mock import AsyncMock + +from dio_chacon_wifi_api.exceptions import DIOChaconAPIError, DIOChaconInvalidAuthError +import pytest + +from homeassistant.components.chacon_dio.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_dio_chacon_client: AsyncMock +) -> None: + """Test the full flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not 
result["errors"] + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Chacon DiO dummylogin" + assert result["result"].unique_id == "dummy-user-id" + assert result["data"] == { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + } + + +@pytest.mark.parametrize( + ("exception", "expected"), + [ + (Exception("Bad request Boy :) --"), {"base": "unknown"}), + (DIOChaconInvalidAuthError, {"base": "invalid_auth"}), + (DIOChaconAPIError, {"base": "cannot_connect"}), + ], +) +async def test_errors( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_dio_chacon_client: AsyncMock, + exception: Exception, + expected: dict[str, str], +) -> None: + """Test we handle any error.""" + mock_dio_chacon_client.get_user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_USERNAME: "nada", + CONF_PASSWORD: "nadap", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == expected + + # Test of recover in normal state after correction of the 1st error + mock_dio_chacon_client.get_user_id.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Chacon DiO dummylogin" + assert result["result"].unique_id == "dummy-user-id" + assert result["data"] == { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test abort when setting up duplicate entry.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + mock_dio_chacon_client.get_user_id.return_value = "test_entry_unique_id" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "dummylogin", + CONF_PASSWORD: "dummypass", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py new file mode 100644 index 00000000000..be606e67e1e --- /dev/null +++ b/tests/components/chacon_dio/test_cover.py @@ -0,0 +1,157 @@ +"""Test the Chacon Dio cover.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + SERVICE_STOP_COVER, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +COVER_ENTITY_ID = "cover.shutter_mock_1" + + +async def test_entities( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_cover_actions( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the open, close, stop and set position actions of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_CLOSING + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPEN + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPENING + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_POSITION: 25, ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + await hass.async_block_till_done() + state = hass.states.get(COVER_ENTITY_ID) + assert state.state == STATE_OPENING + + +async def test_cover_callbacks( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test that server-side callbacks update the Chacon Dio cover state.""" + + await setup_integration(hass, mock_config_entry) + + # Server-side callback tests + # Retrieve the callback method registered on the mock client + callback_device_state_function: Callable = ( + mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] + ) + + # Define a helper that invokes it + async def _callback_device_state_function(open_level: int, movement: str) -> None: + callback_device_state_function( + { + "id": "L4HActuator_idmock1", + "connected": True, + "openlevel": open_level, + "movement": movement, + } + ) + await hass.async_block_till_done() + + # Invoke it to simulate a state update pushed by the server + await _callback_device_state_function(79, "stop") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 79 + assert state.state == STATE_OPEN + + await _callback_device_state_function(90, "up") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 90 + assert state.state == STATE_OPENING + + await _callback_device_state_function(60, "down") + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 60 + assert state.state == STATE_CLOSING + + +async def test_no_cover_found( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, 
+ entity_registry: er.EntityRegistry, +) -> None: + """Test the cover absence.""" + + mock_dio_chacon_client.search_all_devices.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get(COVER_ENTITY_ID) diff --git a/tests/components/chacon_dio/test_init.py b/tests/components/chacon_dio/test_init.py new file mode 100644 index 00000000000..78f1a85c71a --- /dev/null +++ b/tests/components/chacon_dio/test_init.py @@ -0,0 +1,43 @@ +"""Test the Dio Chacon integration init.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState + +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_cover_unload_entry( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test unloading the Dio Chacon config entry.""" + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + mock_dio_chacon_client.disconnect.assert_called() + + +async def test_cover_shutdown_event( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the Dio Chacon client disconnects when Home Assistant stops.""" + + await setup_integration(hass, mock_config_entry) + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + mock_dio_chacon_client.disconnect.assert_called() diff --git a/tests/components/climate/conftest.py b/tests/components/climate/conftest.py index a3a6af6e8a3..fd4368c4219 100644 --- a/tests/components/climate/conftest.py +++ b/tests/components/climate/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Climate platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/climate/test_device_condition.py b/tests/components/climate/test_device_condition.py index 0961bd3dc73..16595f57c6f 100644 --- a/tests/components/climate/test_device_condition.py +++ b/tests/components/climate/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -151,7 +141,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test 
for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -220,7 +210,7 @@ async def test_if_state( # Should not fire, entity doesn't exist yet hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set( entry.entity_id, @@ -232,8 +222,8 @@ async def test_if_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" hass.states.async_set( entry.entity_id, @@ -246,13 +236,13 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_preset_mode - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_preset_mode - event - test_event2" hass.states.async_set( entry.entity_id, @@ -265,14 +255,14 @@ async def test_if_state( # Should not fire hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -323,8 +313,8 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_hvac_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_hvac_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/climate/test_device_trigger.py b/tests/components/climate/test_device_trigger.py index e8e5b577bf4..a492d9805b5 100644 --- a/tests/components/climate/test_device_trigger.py +++ b/tests/components/climate/test_device_trigger.py @@ -23,11 +23,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -35,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -151,7 +141,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -236,8 +226,8 @@ async def 
test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "hvac_mode_changed" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "hvac_mode_changed" # Fake that the temperature is changing hass.states.async_set( @@ -250,8 +240,8 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "current_temperature_changed" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "current_temperature_changed" # Fake that the humidity is changing hass.states.async_set( @@ -264,15 +254,15 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "current_humidity_changed" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "current_humidity_changed" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -329,8 +319,8 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "hvac_mode_changed" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "hvac_mode_changed" async def test_get_trigger_capabilities_hvac_mode(hass: HomeAssistant) -> None: diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 4756c265aea..f306551e540 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from enum import Enum from types import ModuleType +from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -17,9 +18,14 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.components.climate.const import ( + ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, + ATTR_MAX_TEMP, + ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, @@ -27,7 +33,13 @@ from homeassistant.components.climate.const import ( ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, UnitOfTemperature +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_WHOLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -158,7 +170,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert climate.turn_off.called -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [ (enum_field, constant_prefix) for enum_field in enum @@ -1152,3 +1164,127 @@ async def test_no_issue_no_aux_property( "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " "and will be unsupported from Home Assistant 2024.10." 
) not in caplog.text + + +async def test_temperature_validation( + hass: HomeAssistant, config_flow_fixture: None +) -> None: + """Test validation for temperatures.""" + + class MockClimateEntityTemp(MockClimateEntity): + """Mock climate class with mocked aux heater.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + _attr_target_temperature = 15 + _attr_target_temperature_high = 18 + _attr_target_temperature_low = 10 + _attr_target_temperature_step = PRECISION_WHOLE + + def set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if ATTR_TEMPERATURE in kwargs: + self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] + if ATTR_TARGET_TEMP_HIGH in kwargs: + self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] + self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTemp(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None + assert state.attributes.get(ATTR_MIN_TEMP) == 7 + assert state.attributes.get(ATTR_MAX_TEMP) == 35 + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 40.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TEMPERATURE: "40", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 40.0 is not valid. Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 0.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "0", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 0.0 is not valid. 
Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "10", + }, + blocking=True, + ) + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 10 + assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25 diff --git a/tests/components/climate/test_intent.py b/tests/components/climate/test_intent.py index ab1e3629ef8..54e2e4ff1a6 100644 --- a/tests/components/climate/test_intent.py +++ b/tests/components/climate/test_intent.py @@ -1,7 +1,8 @@ """Test climate intents.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.components import conversation from homeassistant.components.climate import ( diff --git a/tests/components/climate/test_reproduce_state.py b/tests/components/climate/test_reproduce_state.py index 636ab326a2b..0632ebcc9e4 100644 --- a/tests/components/climate/test_reproduce_state.py +++ b/tests/components/climate/test_reproduce_state.py @@ -3,7 +3,6 @@ import pytest from homeassistant.components.climate import ( - ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HUMIDITY, ATTR_PRESET_MODE, @@ -11,7 +10,6 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN, - SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HUMIDITY, SERVICE_SET_HVAC_MODE, @@ -96,7 +94,6 @@ async def test_state_with_context(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("service", "attribute"), [ - (SERVICE_SET_AUX_HEAT, ATTR_AUX_HEAT), (SERVICE_SET_PRESET_MODE, ATTR_PRESET_MODE), (SERVICE_SET_SWING_MODE, ATTR_SWING_MODE), (SERVICE_SET_FAN_MODE, ATTR_FAN_MODE), diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index c7d0702ea88..3a5d333f9b8 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -1,6 +1,6 @@ """Fixtures for cloud tests.""" -from collections.abc import Callable, Coroutine +from collections.abc import AsyncGenerator, Callable, Coroutine, Generator from pathlib import Path from typing import Any from unittest.mock import DEFAULT, MagicMock, PropertyMock, patch @@ -15,7 +15,6 @@ from hass_nabucasa.remote import RemoteUI from hass_nabucasa.voice import Voice import jwt import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.cloud.client import CloudClient from homeassistant.components.cloud.const import DATA_CLOUD diff --git a/tests/components/cloud/test_account_link.py b/tests/components/cloud/test_account_link.py index acaff7db76c..cd81a7cf691 100644 --- a/tests/components/cloud/test_account_link.py +++ b/tests/components/cloud/test_account_link.py @@ -1,12 +1,12 @@ """Test account link services.""" import asyncio +from collections.abc import Generator import logging from time import time from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.cloud import account_link diff --git a/tests/components/cloud/test_binary_sensor.py b/tests/components/cloud/test_binary_sensor.py index 789947f3c7d..8a4a1a0e9aa 100644 --- a/tests/components/cloud/test_binary_sensor.py +++ b/tests/components/cloud/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for the cloud binary sensor.""" +from collections.abc import Generator from 
unittest.mock import MagicMock, patch from hass_nabucasa.const import DISPATCH_REMOTE_CONNECT, DISPATCH_REMOTE_DISCONNECT import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_registry import EntityRegistry diff --git a/tests/components/cloud/test_stt.py b/tests/components/cloud/test_stt.py index df9e62380f8..02acda1450e 100644 --- a/tests/components/cloud/test_stt.py +++ b/tests/components/cloud/test_stt.py @@ -1,5 +1,6 @@ """Test the speech-to-text platform for the cloud integration.""" +from collections.abc import AsyncGenerator from copy import deepcopy from http import HTTPStatus from typing import Any @@ -7,7 +8,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import STTResponse, VoiceError import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY from homeassistant.components.cloud.const import DOMAIN diff --git a/tests/components/cloud/test_tts.py b/tests/components/cloud/test_tts.py index bf45b6b2895..52a9bc19ea2 100644 --- a/tests/components/cloud/test_tts.py +++ b/tests/components/cloud/test_tts.py @@ -1,6 +1,6 @@ """Tests for cloud tts.""" -from collections.abc import Callable, Coroutine +from collections.abc import AsyncGenerator, Callable, Coroutine from copy import deepcopy from http import HTTPStatus from typing import Any @@ -8,7 +8,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from hass_nabucasa.voice import TTS_VOICES, VoiceError, VoiceTokenError import pytest -from typing_extensions import AsyncGenerator import voluptuous as vol from homeassistant.components.assist_pipeline.pipeline import STORAGE_KEY diff --git a/tests/components/cloudflare/conftest.py b/tests/components/cloudflare/conftest.py index 6c41e9fd179..977126f39a3 100644 --- a/tests/components/cloudflare/conftest.py +++ b/tests/components/cloudflare/conftest.py @@ -1,9 +1,9 @@ """Define fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from . 
import get_mock_client diff --git a/tests/components/co2signal/conftest.py b/tests/components/co2signal/conftest.py index 04ab6db7464..d5cca448569 100644 --- a/tests/components/co2signal/conftest.py +++ b/tests/components/co2signal/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Electricity maps integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.co2signal import DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/co2signal/test_diagnostics.py b/tests/components/co2signal/test_diagnostics.py index edc0007952b..3d5e1a0580b 100644 --- a/tests/components/co2signal/test_diagnostics.py +++ b/tests/components/co2signal/test_diagnostics.py @@ -2,6 +2,7 @@ import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -20,4 +21,4 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/coinbase/test_diagnostics.py b/tests/components/coinbase/test_diagnostics.py index e30bdef30b8..0e06c172c37 100644 --- a/tests/components/coinbase/test_diagnostics.py +++ b/tests/components/coinbase/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -40,4 +41,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/comfoconnect/test_sensor.py b/tests/components/comfoconnect/test_sensor.py index 91e7e1f0e25..fdecfa5b1c7 100644 --- a/tests/components/comfoconnect/test_sensor.py +++ b/tests/components/comfoconnect/test_sensor.py @@ -1,9 +1,9 @@ """Tests for the comfoconnect sensor platform.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.sensor import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py index 98bfb856bb8..c775d87fedb 100644 --- a/tests/components/command_line/test_notify.py +++ b/tests/components/command_line/test_notify.py @@ -3,6 +3,7 @@ from __future__ import annotations import os +from pathlib import Path import subprocess import tempfile from unittest.mock import patch @@ -78,9 +79,7 @@ async def test_command_line_output(hass: HomeAssistant) -> None: await hass.services.async_call( NOTIFY_DOMAIN, "test3", {"message": message}, blocking=True ) - with open(filename, encoding="UTF-8") as handle: - # the echo command adds a line break - assert message == handle.read() + assert message == await hass.async_add_executor_job(Path(filename).read_text) @pytest.mark.parametrize( diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py index 26f97e37543..eeccf2c358e 100644 --- a/tests/components/command_line/test_sensor.py +++ b/tests/components/command_line/test_sensor.py @@ -467,6 +467,46 @@ async def test_update_with_unnecessary_json_attrs( assert 
"key_three" not in entity_state.attributes +@pytest.mark.parametrize( + "get_config", + [ + { + "command_line": [ + { + "sensor": { + "name": "Test", + "command": 'echo \ + {\ + \\"top_level\\": {\ + \\"second_level\\": {\ + \\"key\\": \\"some_json_value\\",\ + \\"another_key\\": \\"another_json_value\\",\ + \\"key_three\\": \\"value_three\\"\ + }\ + }\ + }', + "json_attributes": ["key", "another_key", "key_three"], + "json_attributes_path": "$.top_level.second_level", + } + } + ] + } + ], +) +async def test_update_with_json_attrs_with_json_attrs_path( + hass: HomeAssistant, load_yaml_integration: None +) -> None: + """Test using json_attributes_path to select a different part of the json object as root.""" + + entity_state = hass.states.get("sensor.test") + assert entity_state + assert entity_state.attributes["key"] == "some_json_value" + assert entity_state.attributes["another_key"] == "another_json_value" + assert entity_state.attributes["key_three"] == "value_three" + assert "top_level" not in entity_state.attributes + assert "second_level" not in entity_state.attributes + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/config/conftest.py b/tests/components/config/conftest.py index c401ac19fa9..55393a219b1 100644 --- a/tests/components/config/conftest.py +++ b/tests/components/config/conftest.py @@ -1,5 +1,6 @@ """Test fixtures for the config integration.""" +from collections.abc import Generator from contextlib import contextmanager from copy import deepcopy import json @@ -9,7 +10,6 @@ from typing import Any from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/config/test_area_registry.py b/tests/components/config/test_area_registry.py index fb59725fd29..03a8272e586 100644 --- a/tests/components/config/test_area_registry.py +++ b/tests/components/config/test_area_registry.py @@ -1,11 +1,15 @@ """Test area_registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import area_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar +from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -21,10 +25,17 @@ async def client_fixture( async def test_list_areas( - client: MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_area1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_area1) area1 = area_registry.async_create("mock 1") + + created_area2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_area2) area2 = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, @@ -46,6 +57,8 @@ async def test_list_areas( "labels": [], "name": "mock 1", "picture": None, + "created_at": created_area1.timestamp(), + "modified_at": created_area1.timestamp(), }, { "aliases": unordered(["alias_1", "alias_2"]), @@ -55,12 +68,16 @@ async def test_list_areas( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": created_area2.timestamp(), + "modified_at": created_area2.timestamp(), }, ] async def test_create_area( - client: 
MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" # Create area with only mandatory parameters @@ -78,6 +95,8 @@ async def test_create_area( "labels": [], "name": "mock", "picture": None, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 1 @@ -104,6 +123,8 @@ async def test_create_area( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), } assert len(area_registry.areas) == 2 @@ -161,10 +182,16 @@ async def test_delete_non_existing_area( async def test_update_area( - client: MockHAClientWebSocket, area_registry: ar.AreaRegistry + client: MockHAClientWebSocket, + area_registry: ar.AreaRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) area = area_registry.async_create("mock 1") + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -189,9 +216,14 @@ async def test_update_area( "labels": unordered(["label_1", "label_2"]), "name": "mock 2", "picture": "/image/example.png", + "created_at": created_at.timestamp(), + "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 + modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "aliases": ["alias_1", "alias_1"], @@ -214,6 +246,8 @@ async def test_update_area( "labels": [], "name": "mock 2", "picture": None, + "created_at": created_at.timestamp(), + "modified_at": modified_at.timestamp(), } assert len(area_registry.areas) == 1 diff --git a/tests/components/config/test_automation.py b/tests/components/config/test_automation.py index f907732109d..89113070367 100644 --- a/tests/components/config/test_automation.py +++ b/tests/components/config/test_automation.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import automation from homeassistant.const import STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator diff --git a/tests/components/config/test_category_registry.py b/tests/components/config/test_category_registry.py index b4d171535b6..d4fe6a0c9b9 100644 --- a/tests/components/config/test_category_registry.py +++ b/tests/components/config/test_category_registry.py @@ -1,10 +1,14 @@ """Test category registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import category_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr +from homeassistant.util.dt import utcnow from tests.common import ANY from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -19,6 +23,7 @@ async def client_fixture( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_categories( 
client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, @@ -53,11 +58,15 @@ async def test_list_categories( assert len(msg["result"]) == 2 assert msg["result"][0] == { "category_id": category1.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Energy saving", "icon": "mdi:leaf", } assert msg["result"][1] == { "category_id": category2.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Something else", "icon": "mdi:home", } @@ -71,6 +80,8 @@ async def test_list_categories( assert len(msg["result"]) == 1 assert msg["result"][0] == { "category_id": category3.category_id, + "created_at": utcnow().timestamp(), + "modified_at": utcnow().timestamp(), "name": "Grocery stores", "icon": "mdi:store", } @@ -79,8 +90,11 @@ async def test_list_categories( async def test_create_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" + created1 = datetime(2024, 2, 14, 12, 0, 0) + freezer.move_to(created1) await client.send_json_auto_id( { "type": "config/category_registry/create", @@ -98,9 +112,14 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, + "created_at": created1.timestamp(), + "modified_at": created1.timestamp(), "name": "Energy saving", } + created2 = datetime(2024, 3, 14, 12, 0, 0) + freezer.move_to(created2) + await client.send_json_auto_id( { "scope": "automation", @@ -117,9 +136,14 @@ async def test_create_category( assert msg["result"] == { "icon": None, "category_id": ANY, + "created_at": created2.timestamp(), + "modified_at": created2.timestamp(), "name": "Something else", } + created3 = datetime(2024, 4, 14, 12, 0, 0) + freezer.move_to(created3) + # Test adding the same one again in a different scope await client.send_json_auto_id( { @@ -139,6 +163,8 @@ async def test_create_category( assert msg["result"] == { "icon": "mdi:leaf", "category_id": ANY, + "created_at": created3.timestamp(), + "modified_at": created3.timestamp(), "name": "Energy saving", } @@ -249,8 +275,11 @@ async def test_delete_non_existing_category( async def test_update_category( client: MockHAClientWebSocket, category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created = datetime(2024, 2, 14, 12, 0, 0) + freezer.move_to(created) category = category_registry.async_create( scope="automation", name="Energy saving", @@ -258,6 +287,9 @@ async def test_update_category( assert len(category_registry.categories) == 1 assert len(category_registry.categories["automation"]) == 1 + modified = datetime(2024, 3, 14, 12, 0, 0) + freezer.move_to(modified) + await client.send_json_auto_id( { "scope": "automation", @@ -275,9 +307,14 @@ async def test_update_category( assert msg["result"] == { "icon": "mdi:left", "category_id": category.category_id, + "created_at": created.timestamp(), + "modified_at": modified.timestamp(), "name": "ENERGY SAVING", } + modified = datetime(2024, 4, 14, 12, 0, 0) + freezer.move_to(modified) + await client.send_json_auto_id( { "scope": "automation", @@ -295,6 +332,8 @@ async def test_update_category( assert msg["result"] == { "icon": None, "category_id": category.category_id, + "created_at": created.timestamp(), + "modified_at": modified.timestamp(), "name": "Energy saving", } diff --git a/tests/components/config/test_config_entries.py b/tests/components/config/test_config_entries.py index 
e023a60f215..a4dc91d5355 100644 --- a/tests/components/config/test_config_entries.py +++ b/tests/components/config/test_config_entries.py @@ -1,12 +1,13 @@ """Test config entries API.""" from collections import OrderedDict +from collections.abc import Generator from http import HTTPStatus from unittest.mock import ANY, AsyncMock, patch from aiohttp.test_utils import TestClient +from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant import config_entries as core_ce, data_entry_flow, loader @@ -18,6 +19,7 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_flow, config_validation as cv from homeassistant.loader import IntegrationNotFound from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -69,6 +71,7 @@ def mock_flow() -> Generator[None]: yield +@pytest.mark.usefixtures("freezer") @pytest.mark.usefixtures("clear_handlers", "mock_flow") async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: """Test get entries.""" @@ -124,12 +127,15 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: data = await resp.json() for entry in data: entry.pop("entry_id") + timestamp = utcnow().timestamp() assert data == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -142,10 +148,12 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -158,10 +166,12 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": core_ce.ConfigEntryDisabler.USER, "domain": "comp3", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -174,10 +184,12 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -190,10 +202,12 @@ async def test_get_entries(hass: HomeAssistant, client: TestClient) -> None: "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -509,7 +523,7 @@ async def test_abort(hass: HomeAssistant, client: TestClient) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations") +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: 
"""Test a flow that creates an account.""" mock_platform(hass, "test.config_flow", None) @@ -536,6 +550,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: entries = hass.config_entries.async_entries("test") assert len(entries) == 1 + timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -544,11 +559,13 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: "type": "create_entry", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -567,7 +584,7 @@ async def test_create_account(hass: HomeAssistant, client: TestClient) -> None: } -@pytest.mark.usefixtures("enable_custom_integrations") +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: """Test we can finish a two step flow.""" mock_integration( @@ -616,6 +633,7 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: entries = hass.config_entries.async_entries("test") assert len(entries) == 1 + timestamp = utcnow().timestamp() data = await resp.json() data.pop("flow_id") assert data == { @@ -624,11 +642,13 @@ async def test_two_step_flow(hass: HomeAssistant, client: TestClient) -> None: "title": "user-title", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1059,6 +1079,7 @@ async def test_options_flow_with_invalid_data( assert data == {"errors": {"choices": "invalid is not a valid option"}} +@pytest.mark.usefixtures("freezer") async def test_get_single( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1080,13 +1101,16 @@ async def test_get_single( ) response = await ws_client.receive_json() + timestamp = utcnow().timestamp() assert response["success"] assert response["result"]["config_entry"] == { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entry.entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1366,7 +1390,7 @@ async def test_ignore_flow_nonexisting( assert response["error"]["code"] == "not_found" -@pytest.mark.usefixtures("clear_handlers") +@pytest.mark.usefixtures("clear_handlers", "freezer") async def test_get_matching_entries_ws( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: @@ -1420,13 +1444,16 @@ async def test_get_matching_entries_ws( await ws_client.send_json_auto_id({"type": "config_entries/get"}) response = await ws_client.receive_json() + timestamp = utcnow().timestamp() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1439,11 +1466,13 @@ async def 
test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1456,11 +1485,13 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1473,11 +1504,13 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1490,11 +1523,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1518,11 +1553,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1545,11 +1582,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1562,11 +1601,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1589,11 +1630,13 @@ async def test_get_matching_entries_ws( response = await ws_client.receive_json() assert response["result"] == [ { + "created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1606,11 +1649,13 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1639,11 +1684,13 @@ async def test_get_matching_entries_ws( assert response["result"] == [ { + 
"created_at": timestamp, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1656,11 +1703,13 @@ async def test_get_matching_entries_ws( "title": "Test 1", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1673,11 +1722,13 @@ async def test_get_matching_entries_ws( "title": "Test 2", }, { + "created_at": timestamp, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1690,11 +1741,13 @@ async def test_get_matching_entries_ws( "title": "Test 3", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp4", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1707,11 +1760,13 @@ async def test_get_matching_entries_ws( "title": "Test 4", }, { + "created_at": timestamp, "disabled_by": None, "domain": "comp5", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1759,7 +1814,9 @@ async def test_get_matching_entries_ws( @pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, ) -> None: """Test subscribe entries with the websocket api.""" assert await async_setup_component(hass, "config", {}) @@ -1805,15 +1862,18 @@ async def test_subscribe_entries_ws( assert response["type"] == "result" response = await ws_client.receive_json() assert response["id"] == 5 + created = utcnow().timestamp() assert response["event"] == [ { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1829,11 +1889,13 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp2", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": "Unsupported API", @@ -1849,11 +1911,13 @@ async def test_subscribe_entries_ws( { "type": None, "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1867,17 +1931,21 @@ async def test_subscribe_entries_ws( }, }, ] + freezer.tick() + modified = 
utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1892,17 +1960,21 @@ async def test_subscribe_entries_ws( "type": "updated", } ] + freezer.tick() + modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1917,17 +1989,20 @@ async def test_subscribe_entries_ws( "type": "removed", } ] + freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -1946,9 +2021,12 @@ async def test_subscribe_entries_ws( @pytest.mark.usefixtures("clear_handlers") async def test_subscribe_entries_ws_filtered( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + freezer: FrozenDateTimeFactory, ) -> None: """Test subscribe entries with the websocket api with a type filter.""" + created = utcnow().timestamp() assert await async_setup_component(hass, "config", {}) mock_integration(hass, MockModule("comp1")) mock_integration( @@ -2008,11 +2086,13 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2028,11 +2108,13 @@ async def test_subscribe_entries_ws_filtered( { "type": None, "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": created, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2046,6 +2128,8 @@ async def test_subscribe_entries_ws_filtered( }, }, ] + freezer.tick() + modified = utcnow().timestamp() assert hass.config_entries.async_update_entry(entry, title="changed") assert hass.config_entries.async_update_entry(entry3, title="changed too") assert hass.config_entries.async_update_entry(entry4, title="changed but ignored") @@ -2054,11 +2138,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + 
"modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2078,11 +2164,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": "user", "domain": "comp3", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2097,6 +2185,8 @@ async def test_subscribe_entries_ws_filtered( "type": "updated", } ] + freezer.tick() + modified = utcnow().timestamp() await hass.config_entries.async_remove(entry.entry_id) await hass.config_entries.async_remove(entry2.entry_id) response = await ws_client.receive_json() @@ -2104,11 +2194,13 @@ async def test_subscribe_entries_ws_filtered( assert response["event"] == [ { "entry": { + "created_at": created, "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": modified, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2123,17 +2215,20 @@ async def test_subscribe_entries_ws_filtered( "type": "removed", } ] + freezer.tick() await hass.config_entries.async_add(entry) response = await ws_client.receive_json() assert response["id"] == 5 assert response["event"] == [ { "entry": { + "created_at": entry.created_at.timestamp(), "disabled_by": None, "domain": "comp1", "entry_id": ANY, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": entry.modified_at.timestamp(), "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, @@ -2238,8 +2333,11 @@ async def test_flow_with_multiple_schema_errors_base( } -@pytest.mark.usefixtures("enable_custom_integrations") -async def test_supports_reconfigure(hass: HomeAssistant, client: TestClient) -> None: +@pytest.mark.usefixtures("enable_custom_integrations", "freezer") +async def test_supports_reconfigure( + hass: HomeAssistant, + client: TestClient, +) -> None: """Test a flow that support reconfigure step.""" mock_platform(hass, "test.config_flow", None) @@ -2297,6 +2395,7 @@ async def test_supports_reconfigure(hass: HomeAssistant, client: TestClient) -> assert len(entries) == 1 data = await resp.json() + timestamp = utcnow().timestamp() data.pop("flow_id") assert data == { "handler": "test", @@ -2304,11 +2403,13 @@ async def test_supports_reconfigure(hass: HomeAssistant, client: TestClient) -> "type": "create_entry", "version": 1, "result": { + "created_at": timestamp, "disabled_by": None, "domain": "test", "entry_id": entries[0].entry_id, "error_reason_translation_key": None, "error_reason_translation_placeholders": None, + "modified_at": timestamp, "pref_disable_new_entities": False, "pref_disable_polling": False, "reason": None, diff --git a/tests/components/config/test_core.py b/tests/components/config/test_core.py index 7d02063b2b9..4550f2e08e5 100644 --- a/tests/components/config/test_core.py +++ b/tests/components/config/test_core.py @@ -5,11 +5,11 @@ from unittest.mock import Mock, patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import core from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from 
homeassistant.util import dt as dt_util, location from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py index 0717bb6046d..aab898f5fd6 100644 --- a/tests/components/config/test_device_registry.py +++ b/tests/components/config/test_device_registry.py @@ -1,5 +1,8 @@ """Test device_registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered @@ -7,6 +10,7 @@ from homeassistant.components.config import device_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component +from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, MockModule, mock_integration from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -26,6 +30,7 @@ async def client_fixture( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_devices( hass: HomeAssistant, client: MockHAClientWebSocket, @@ -61,6 +66,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -68,6 +74,8 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -80,6 +88,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -87,6 +96,8 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -111,6 +122,7 @@ async def test_list_devices( "config_entries": [entry.entry_id], "configuration_url": None, "connections": [["ethernet", "12:34:56:78:90:AB:CD:EF"]], + "created_at": utcnow().timestamp(), "disabled_by": None, "entry_type": None, "hw_version": None, @@ -119,6 +131,8 @@ async def test_list_devices( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().timestamp(), "name_by_user": None, "name": None, "primary_config_entry": entry.entry_id, @@ -148,12 +162,15 @@ async def test_update_device( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, payload_key: str, payload_value: str | dr.DeviceEntryDisabler | None, ) -> None: """Test update entry.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -164,6 +181,9 @@ async def test_update_device( assert not getattr(device, payload_key) + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "type": "config/device_registry/update", @@ -183,6 +203,12 
@@ async def test_update_device( assert msg["result"][payload_key] == payload_value assert getattr(device, payload_key) == payload_value + for key, value in ( + ("created_at", created_at), + ("modified_at", modified_at if payload_value is not None else created_at), + ): + assert msg["result"][key] == value.timestamp() + assert getattr(device, key) == value assert isinstance(device.disabled_by, (dr.DeviceEntryDisabler, type(None))) @@ -191,10 +217,13 @@ async def test_update_device_labels( hass: HomeAssistant, client: MockHAClientWebSocket, device_registry: dr.DeviceRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry labels.""" entry = MockConfigEntry(title=None) entry.add_to_hass(hass) + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, connections={("ethernet", "12:34:56:78:90:AB:CD:EF")}, @@ -204,6 +233,8 @@ async def test_update_device_labels( ) assert not device.labels + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -224,6 +255,12 @@ async def test_update_device_labels( assert msg["result"]["labels"] == unordered(["label1", "label2"]) assert device.labels == {"label1", "label2"} + for key, value in ( + ("created_at", created_at), + ("modified_at", modified_at), + ): + assert msg["result"][key] == value.timestamp() + assert getattr(device, key) == value async def test_remove_config_entry_from_device( @@ -423,3 +460,91 @@ async def test_remove_config_entry_from_device_fails( assert not response["success"] assert response["error"]["code"] == "home_assistant_error" assert response["error"]["message"] == "Integration not found" + + +async def test_remove_config_entry_from_device_if_integration_remove( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, +) -> None: + """Test removing config entry from device doesn't lead to an error when the integration removes the entry.""" + assert await async_setup_component(hass, "config", {}) + ws_client = await hass_ws_client(hass) + + can_remove = False + + async def async_remove_config_entry_device(hass, config_entry, device_entry): + if can_remove: + device_registry.async_update_device( + device_entry.id, remove_config_entry_id=config_entry.entry_id + ) + return can_remove + + mock_integration( + hass, + MockModule( + "comp1", async_remove_config_entry_device=async_remove_config_entry_device + ), + ) + mock_integration( + hass, + MockModule( + "comp2", async_remove_config_entry_device=async_remove_config_entry_device + ), + ) + + entry_1 = MockConfigEntry( + domain="comp1", + title="Test 1", + source="bla", + ) + entry_1.supports_remove_device = True + entry_1.add_to_hass(hass) + + entry_2 = MockConfigEntry( + domain="comp1", + title="Test 1", + source="bla", + ) + entry_2.supports_remove_device = True + entry_2.add_to_hass(hass) + + device_registry.async_get_or_create( + config_entry_id=entry_1.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + device_entry = device_registry.async_get_or_create( + config_entry_id=entry_2.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + assert device_entry.config_entries == {entry_1.entry_id, entry_2.entry_id} + + # Try removing a config entry from the device, it should fail because + # async_remove_config_entry_device returns False + response = await 
ws_client.remove_device(device_entry.id, entry_1.entry_id) + + assert not response["success"] + assert response["error"]["code"] == "home_assistant_error" + + # Make async_remove_config_entry_device return True + can_remove = True + + # Remove the 1st config entry + response = await ws_client.remove_device(device_entry.id, entry_1.entry_id) + + assert response["success"] + assert response["result"]["config_entries"] == [entry_2.entry_id] + + # Check that the config entry was removed from the device + assert device_registry.async_get(device_entry.id).config_entries == { + entry_2.entry_id + } + + # Remove the 2nd config entry + response = await ws_client.remove_device(device_entry.id, entry_2.entry_id) + + assert response["success"] + assert response["result"] is None + + # This was the last config entry, the device is removed + assert not device_registry.async_get(device_entry.id) diff --git a/tests/components/config/test_entity_registry.py b/tests/components/config/test_entity_registry.py index 813ec654abb..60657d4a77b 100644 --- a/tests/components/config/test_entity_registry.py +++ b/tests/components/config/test_entity_registry.py @@ -1,5 +1,8 @@ """Test entity_registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered @@ -13,6 +16,7 @@ from homeassistant.helpers.entity_registry import ( RegistryEntryDisabler, RegistryEntryHider, ) +from homeassistant.util.dt import utcnow from tests.common import ( ANY, @@ -33,6 +37,7 @@ async def client( return await hass_ws_client(hass) +@pytest.mark.usefixtures("freezer") async def test_list_entities( hass: HomeAssistant, client: MockHAClientWebSocket ) -> None: @@ -62,6 +67,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -71,6 +77,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -82,6 +89,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -91,6 +99,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": None, "options": {}, "original_name": None, @@ -129,6 +138,7 @@ async def test_list_entities( "area_id": None, "categories": {}, "config_entry_id": None, + "created_at": utcnow().timestamp(), "device_id": None, "disabled_by": None, "entity_category": None, @@ -138,6 +148,7 @@ async def test_list_entities( "icon": None, "id": ANY, "labels": [], + "modified_at": utcnow().timestamp(), "name": "Hello World", "options": {}, "original_name": None, @@ -325,6 +336,8 @@ async def test_list_entities_for_display( async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" + name_created_at = datetime(1994, 2, 14, 12, 0, 0) + no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) mock_registry( hass, { @@ -333,11 +346,15 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> unique_id="1234", platform="test_platform", name="Hello World", + created_at=name_created_at, + modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", 
platform="test_platform", + created_at=no_name_created_at, + modified_at=no_name_created_at, ), }, ) @@ -353,6 +370,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -363,6 +381,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], + "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -387,6 +406,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -397,6 +417,7 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> "icon": None, "id": ANY, "labels": [], + "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -410,6 +431,8 @@ async def test_get_entity(hass: HomeAssistant, client: MockHAClientWebSocket) -> async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) -> None: """Test get entry.""" + name_created_at = datetime(1994, 2, 14, 12, 0, 0) + no_name_created_at = datetime(2024, 2, 14, 12, 0, 1) mock_registry( hass, { @@ -418,11 +441,15 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) unique_id="1234", platform="test_platform", name="Hello World", + created_at=name_created_at, + modified_at=name_created_at, ), "test_domain.no_name": RegistryEntry( entity_id="test_domain.no_name", unique_id="6789", platform="test_platform", + created_at=no_name_created_at, + modified_at=no_name_created_at, ), }, ) @@ -446,6 +473,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -456,6 +484,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], + "modified_at": name_created_at.timestamp(), "name": "Hello World", "options": {}, "original_device_class": None, @@ -471,6 +500,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": no_name_created_at.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -481,6 +511,7 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) "icon": None, "id": ANY, "labels": [], + "modified_at": no_name_created_at.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -495,9 +526,11 @@ async def test_get_entities(hass: HomeAssistant, client: MockHAClientWebSocket) async def test_update_entity( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test updating entity.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) registry = mock_registry( hass, { @@ -520,6 +553,9 @@ async def test_update_entity( assert state.name == "before update" assert state.attributes[ATTR_ICON] == "icon:before update" + 
modified = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") + freezer.move_to(modified) + # Update area, categories, device_class, hidden_by, icon, labels & name await client.send_json_auto_id( { @@ -544,6 +580,7 @@ async def test_update_entity( "area_id": "mock-area-id", "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, + "created_at": created.timestamp(), "config_entry_id": None, "device_class": "custom_device_class", "device_id": None, @@ -555,6 +592,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": None, @@ -570,6 +608,9 @@ async def test_update_entity( assert state.name == "after update" assert state.attributes[ATTR_ICON] == "icon:after update" + modified = datetime.fromisoformat("2024-07-20T00:00:00.900075+00:00") + freezer.move_to(modified) + # Update hidden_by to illegal value await client.send_json_auto_id( { @@ -597,9 +638,13 @@ async def test_update_entity( assert msg["success"] assert hass.states.get("test_domain.world") is None - assert ( - registry.entities["test_domain.world"].disabled_by is RegistryEntryDisabler.USER - ) + entry = registry.entities["test_domain.world"] + assert entry.disabled_by is RegistryEntryDisabler.USER + assert entry.created_at == created + assert entry.modified_at == modified + + modified = datetime.fromisoformat("2024-07-21T00:00:00.900075+00:00") + freezer.move_to(modified) # Update disabled_by to None await client.send_json_auto_id( @@ -619,6 +664,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -629,6 +675,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {}, "original_device_class": None, @@ -641,6 +688,9 @@ async def test_update_entity( "require_restart": True, } + modified = datetime.fromisoformat("2024-07-22T00:00:00.900075+00:00") + freezer.move_to(modified) + # Update entity option await client.send_json_auto_id( { @@ -660,6 +710,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -670,6 +721,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -681,6 +733,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-23T00:00:00.900075+00:00") + freezer.move_to(modified) + # Add a category to the entity await client.send_json_auto_id( { @@ -700,6 +755,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -710,6 +766,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": 
modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -721,6 +778,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-24T00:00:00.900075+00:00") + freezer.move_to(modified) + # Move the entity to a different category await client.send_json_auto_id( { @@ -740,6 +800,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope2": "id", "scope3": "other_id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -750,6 +811,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -761,6 +823,9 @@ async def test_update_entity( }, } + modified = datetime.fromisoformat("2024-07-23T10:00:00.900075+00:00") + freezer.move_to(modified) + # Move the entity to a different category await client.send_json_auto_id( { @@ -780,6 +845,7 @@ async def test_update_entity( "capabilities": None, "categories": {"scope1": "id", "scope3": "other_id"}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": "custom_device_class", "device_id": None, "disabled_by": None, @@ -790,6 +856,7 @@ async def test_update_entity( "icon": "icon:after update", "id": ANY, "labels": unordered(["label1", "label2"]), + "modified_at": modified.timestamp(), "name": "after update", "options": {"sensor": {"unit_of_measurement": "beard_second"}}, "original_device_class": None, @@ -803,9 +870,11 @@ async def test_update_entity( async def test_update_entity_require_restart( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test updating entity.""" + created = datetime.fromisoformat("2024-02-14T12:00:00+00:00") + freezer.move_to(created) entity_id = "test_domain.test_platform_1234" config_entry = MockConfigEntry(domain="test_platform") config_entry.add_to_hass(hass) @@ -817,6 +886,9 @@ async def test_update_entity_require_restart( state = hass.states.get(entity_id) assert state is not None + modified = datetime.fromisoformat("2024-07-20T13:30:00+00:00") + freezer.move_to(modified) + # UPDATE DISABLED_BY TO NONE await client.send_json_auto_id( { @@ -835,6 +907,7 @@ async def test_update_entity_require_restart( "capabilities": None, "categories": {}, "config_entry_id": config_entry.entry_id, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -845,6 +918,7 @@ async def test_update_entity_require_restart( "icon": None, "id": ANY, "labels": [], + "modified_at": created.timestamp(), "name": None, "options": {}, "original_device_class": None, @@ -909,9 +983,11 @@ async def test_enable_entity_disabled_device( async def test_update_entity_no_changes( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test update entity with no changes.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) mock_registry( hass, { @@ -932,6 +1008,9 @@ async def test_update_entity_no_changes( assert state is not None assert state.name == "name of entity" + modified = 
datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") + freezer.move_to(modified) + await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -949,6 +1028,7 @@ async def test_update_entity_no_changes( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -959,6 +1039,7 @@ async def test_update_entity_no_changes( "icon": None, "id": ANY, "labels": [], + "modified_at": created.timestamp(), "name": "name of entity", "options": {}, "original_device_class": None, @@ -1002,9 +1083,11 @@ async def test_update_nonexisting_entity(client: MockHAClientWebSocket) -> None: async def test_update_entity_id( - hass: HomeAssistant, client: MockHAClientWebSocket + hass: HomeAssistant, client: MockHAClientWebSocket, freezer: FrozenDateTimeFactory ) -> None: """Test update entity id.""" + created = datetime.fromisoformat("2024-02-14T12:00:00.900075+00:00") + freezer.move_to(created) mock_registry( hass, { @@ -1022,6 +1105,9 @@ async def test_update_entity_id( assert hass.states.get("test_domain.world") is not None + modified = datetime.fromisoformat("2024-07-20T13:30:00.900075+00:00") + freezer.move_to(modified) + await client.send_json_auto_id( { "type": "config/entity_registry/update", @@ -1039,6 +1125,7 @@ async def test_update_entity_id( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": created.timestamp(), "device_class": None, "device_id": None, "disabled_by": None, @@ -1049,6 +1136,7 @@ async def test_update_entity_id( "icon": None, "id": ANY, "labels": [], + "modified_at": modified.timestamp(), "name": None, "options": {}, "original_device_class": None, diff --git a/tests/components/config/test_floor_registry.py b/tests/components/config/test_floor_registry.py index b4e3907bc4d..da6e550b1f6 100644 --- a/tests/components/config/test_floor_registry.py +++ b/tests/components/config/test_floor_registry.py @@ -1,11 +1,15 @@ """Test floor registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from pytest_unordered import unordered from homeassistant.components.config import floor_registry from homeassistant.core import HomeAssistant from homeassistant.helpers import floor_registry as fr +from homeassistant.util.dt import utcnow from tests.typing import MockHAClientWebSocket, WebSocketGenerator @@ -22,9 +26,15 @@ async def client_fixture( async def test_list_floors( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) floor_registry.async_create("First floor") + + created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_2) floor_registry.async_create( name="Second floor", aliases={"top floor", "attic"}, @@ -34,6 +44,12 @@ async def test_list_floors( assert len(floor_registry.floors) == 2 + # update first floor to change modified_at + floor_registry.async_update( + "first_floor", + name="First floor...", + ) + await client.send_json_auto_id({"type": "config/floor_registry/list"}) msg = await client.receive_json() @@ -41,20 +57,25 @@ async def test_list_floors( assert len(msg["result"]) == len(floor_registry.floors) assert msg["result"][0] == { "aliases": [], + "created_at": created_1.timestamp(), "icon": None, "floor_id": "first_floor", - "name": "First floor", + 
"modified_at": created_2.timestamp(), + "name": "First floor...", "level": None, } assert msg["result"][1] == { "aliases": unordered(["top floor", "attic"]), + "created_at": created_2.timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", + "modified_at": created_2.timestamp(), "name": "Second floor", "level": 2, } +@pytest.mark.usefixtures("freezer") async def test_create_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, @@ -69,8 +90,10 @@ async def test_create_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], + "created_at": utcnow().timestamp(), "icon": None, "floor_id": "first_floor", + "modified_at": utcnow().timestamp(), "name": "First floor", "level": None, } @@ -90,8 +113,10 @@ async def test_create_floor( assert len(floor_registry.floors) == 2 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), + "created_at": utcnow().timestamp(), "icon": "mdi:home-floor-2", "floor_id": "second_floor", + "modified_at": utcnow().timestamp(), "name": "Second floor", "level": 2, } @@ -163,10 +188,15 @@ async def test_delete_non_existing_floor( async def test_update_floor( client: MockHAClientWebSocket, floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) floor = floor_registry.async_create("First floor") assert len(floor_registry.floors) == 1 + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { @@ -184,12 +214,16 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": unordered(["top floor", "attic"]), + "created_at": created_at.timestamp(), "icon": "mdi:home-floor-2", "floor_id": floor.floor_id, + "modified_at": modified_at.timestamp(), "name": "Second floor", "level": 2, } + modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) await client.send_json_auto_id( { "floor_id": floor.floor_id, @@ -206,8 +240,10 @@ async def test_update_floor( assert len(floor_registry.floors) == 1 assert msg["result"] == { "aliases": [], + "created_at": created_at.timestamp(), "icon": None, "floor_id": floor.floor_id, + "modified_at": modified_at.timestamp(), "name": "First floor", "level": None, } diff --git a/tests/components/config/test_label_registry.py b/tests/components/config/test_label_registry.py index 040b3bfe28a..3eff759132f 100644 --- a/tests/components/config/test_label_registry.py +++ b/tests/components/config/test_label_registry.py @@ -1,5 +1,8 @@ """Test label registry API.""" +from datetime import datetime + +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.config import label_registry @@ -21,9 +24,15 @@ async def client_fixture( async def test_list_labels( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test list entries.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) label_registry.async_create("mock 1") + + created_2 = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(created_2) label_registry.async_create( name="mock 2", color="#00FF00", @@ -33,6 +42,12 @@ async def test_list_labels( assert len(label_registry.labels) == 2 + # update mock 1 to change modified_at + 
label_registry.async_update( + "mock_1", + name="Mock 1...", + ) + await client.send_json_auto_id({"type": "config/label_registry/list"}) msg = await client.receive_json() @@ -40,16 +55,20 @@ async def test_list_labels( assert len(msg["result"]) == len(label_registry.labels) assert msg["result"][0] == { "color": None, + "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock_1", - "name": "mock 1", + "modified_at": created_2.timestamp(), + "name": "Mock 1...", } assert msg["result"][1] == { "color": "#00FF00", + "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mock_2", + "modified_at": created_2.timestamp(), "name": "mock 2", } @@ -57,8 +76,11 @@ async def test_list_labels( async def test_create_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test create entry.""" + created_1 = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_1) await client.send_json_auto_id( { "name": "MOCK", @@ -71,12 +93,16 @@ async def test_create_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": None, + "created_at": created_1.timestamp(), "description": None, "icon": None, "label_id": "mock", "name": "MOCK", + "modified_at": created_1.timestamp(), } + created_2 = datetime.fromisoformat("2024-07-17T13:30:00.900075+00:00") + freezer.move_to(created_2) await client.send_json_auto_id( { "id": 2, @@ -93,12 +119,16 @@ async def test_create_label( assert len(label_registry.labels) == 2 assert msg["result"] == { "color": "#00FF00", + "created_at": created_2.timestamp(), "description": "This is the second label", "icon": "mdi:two", "label_id": "mockery", + "modified_at": created_2.timestamp(), "name": "MOCKERY", } + created_3 = datetime.fromisoformat("2024-07-18T13:30:00.900075+00:00") + freezer.move_to(created_3) await client.send_json_auto_id( { "name": "MAGIC", @@ -114,9 +144,11 @@ async def test_create_label( assert len(label_registry.labels) == 3 assert msg["result"] == { "color": "indigo", + "created_at": created_3.timestamp(), "description": "This is the third label", "icon": "mdi:three", "label_id": "magic", + "modified_at": created_3.timestamp(), "name": "MAGIC", } @@ -182,11 +214,17 @@ async def test_delete_non_existing_label( async def test_update_label( client: MockHAClientWebSocket, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test update entry.""" + created_at = datetime.fromisoformat("2024-07-16T13:30:00.900075+00:00") + freezer.move_to(created_at) label = label_registry.async_create("mock") assert len(label_registry.labels) == 1 + modified_at = datetime.fromisoformat("2024-07-16T13:45:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, @@ -203,12 +241,17 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "#00FF00", + "created_at": created_at.timestamp(), "description": "This is a label description", "icon": "mdi:test", "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED", } + modified_at = datetime.fromisoformat("2024-07-16T13:50:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, @@ -225,12 +268,17 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": None, + "created_at": 
created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED AGAIN", } + modified_at = datetime.fromisoformat("2024-07-16T13:55:00.900075+00:00") + freezer.move_to(modified_at) + await client.send_json_auto_id( { "label_id": label.label_id, @@ -247,9 +295,11 @@ async def test_update_label( assert len(label_registry.labels) == 1 assert msg["result"] == { "color": "primary", + "created_at": created_at.timestamp(), "description": None, "icon": None, "label_id": "mock", + "modified_at": modified_at.timestamp(), "name": "UPDATED YET AGAIN", } diff --git a/tests/components/config/test_scene.py b/tests/components/config/test_scene.py index 22bcfa345a2..c4c207f33f9 100644 --- a/tests/components/config/test_scene.py +++ b/tests/components/config/test_scene.py @@ -7,11 +7,11 @@ from unittest.mock import ANY, patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import scene from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator diff --git a/tests/components/config/test_script.py b/tests/components/config/test_script.py index 4771576ed6e..88245eb567f 100644 --- a/tests/components/config/test_script.py +++ b/tests/components/config/test_script.py @@ -7,12 +7,12 @@ from unittest.mock import patch import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import config from homeassistant.components.config import script from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from homeassistant.util import yaml from tests.typing import ClientSessionGenerator diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 42746525a0d..7d15bde88c0 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator +from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant @@ -21,9 +21,9 @@ if TYPE_CHECKING: from .switch.common import MockSwitch -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("zeroconf") is not None) def patch_zeroconf_multiple_catcher() -> Generator[None]: - """Patch zeroconf wrapper that detects if multiple instances are used.""" + """If installed, patch zeroconf wrapper that detects if multiple instances are used.""" with patch( "homeassistant.components.zeroconf.install_multiple_zeroconf_catcher", side_effect=lambda zc: None, @@ -124,9 +124,9 @@ def mock_conversation_agent_fixture(hass: HomeAssistant) -> MockAgent: return mock_conversation_agent_fixture_helper(hass) -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="session", autouse=find_spec("ffmpeg") is not None) def prevent_ffmpeg_subprocess() -> Generator[None]: - """Prevent ffmpeg from creating a subprocess.""" + """If installed, prevent 
ffmpeg from creating a subprocess.""" with patch( "homeassistant.components.ffmpeg.FFVersion.get_version", return_value="6.0" ): diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr new file mode 100644 index 00000000000..d015b19ddc1 --- /dev/null +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -0,0 +1,686 @@ +# serializer version: 1 +# name: test_custom_sentences + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a stout', + }), + }), + }), + }) +# --- +# name: test_custom_sentences.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en-us', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'You ordered a lager', + }), + }), + }), + }) +# --- +# name: test_custom_sentences_config + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Stealth mode engaged', + }), + }), + }), + }) +# --- +# name: test_intent_alias_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_alias_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_alias_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added alias', + }), + }), + }), + }) +# --- +# name: test_intent_conversion_not_expose_new + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_conversion_not_expose_new.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': 
list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 'friendly light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.late', + 'name': 'friendly light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_added_removed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called late added light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called my cool light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 
'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called my cool light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed.4 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_exposed.5 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_fail_if_unexposed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_remove_custom_name.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called renamed light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed.1 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'renamed light', + 'type': , + }), + ]), + 'targets': list([ + ]), 
+ }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed.2 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called kitchen light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed.3 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen light', + 'type': , + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_intent_entity_renamed.4 + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_valid_targets', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Sorry, I am not aware of any device called renamed light', + }), + }), + }), + }) +# --- diff --git a/tests/components/conversation/snapshots/test_http.ambr b/tests/components/conversation/snapshots/test_http.ambr new file mode 100644 index 00000000000..fd02646df48 --- /dev/null +++ b/tests/components/conversation/snapshots/test_http.ambr @@ -0,0 +1,711 @@ +# serializer version: 1 +# name: test_get_agent_list + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'af', + 'ar', + 'bg', + 'bn', + 'ca', + 'cs', + 'da', + 'de', + 'de-CH', + 'el', + 'en', + 'es', + 'et', + 'eu', + 'fa', + 'fi', + 'fr', + 'fr-CA', + 'gl', + 'gu', + 'he', + 'hi', + 'hr', + 'hu', + 'id', + 'is', + 'it', + 'ka', + 'kn', + 'ko', + 'lb', + 'lt', + 'lv', + 'ml', + 'mn', + 'ms', + 'nb', + 'nl', + 'pl', + 'pt', + 'pt-br', + 'ro', + 'ru', + 'sk', + 'sl', + 'sr', + 'sv', + 'sw', + 'te', + 'tr', + 'uk', + 'ur', + 'vi', + 'zh-cn', + 'zh-hk', + 'zh-tw', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.1 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + 'smurfish', + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.2 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.3 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 
'supported_languages': list([ + 'en', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.4 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de', + 'de-CH', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_get_agent_list.5 + dict({ + 'agents': list([ + dict({ + 'id': 'conversation.home_assistant', + 'name': 'Home Assistant', + 'supported_languages': list([ + 'de-CH', + 'de', + ]), + }), + dict({ + 'id': 'mock-entry', + 'name': 'Mock Title', + 'supported_languages': list([ + ]), + }), + dict({ + 'id': 'mock-entry-support-all', + 'name': 'Mock Title', + 'supported_languages': '*', + }), + ]), + }) +# --- +# name: test_http_api_handle_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'failed_to_handle', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_api_no_match + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_http_api_unexpected_failure + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'unknown', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'An unexpected error occurred', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[None] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[conversation.home_assistant] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_http_processing_intent[homeassistant] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'failed': list([ + ]), + 'success': list([ + dict({ + 'id': 'light.kitchen', + 'name': 'kitchen', + 'type': 'entity', + }), + ]), + 'targets': list([ + ]), + }), + 'language': 'en', + 'response_type': 'action_done', + 'speech': dict({ + 'plain': dict({ + 
'extra_data': None, + 'speech': 'Turned on the light', + }), + }), + }), + }) +# --- +# name: test_ws_api[payload0] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload1] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload2] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload3] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload4] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'test-language', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_api[payload5] + dict({ + 'conversation_id': None, + 'response': dict({ + 'card': dict({ + }), + 'data': dict({ + 'code': 'no_intent_match', + }), + 'language': 'en', + 'response_type': 'error', + 'speech': dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': "Sorry, I couldn't understand that", + }), + }), + }), + }) +# --- +# name: test_ws_hass_agent_debug + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on ( | [in ])', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'my cool light', + 'value': 'my cool light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOff', + }), + 'match': True, + 'sentence_template': '[] ( | [in ]) [to] off', + 'slots': dict({ + 'name': 'my cool light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': '', + 'value': 'light', + }), + }), + 'intent': dict({ + 'name': 'HassTurnOn', + }), + 'match': True, + 'sentence_template': ' on [all] in ', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'light', + }), + 'source': 'builtin', + 'targets': dict({ + 
'light.kitchen': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + dict({ + 'details': dict({ + 'area': dict({ + 'name': 'area', + 'text': 'kitchen', + 'value': 'kitchen', + }), + 'domain': dict({ + 'name': 'domain', + 'text': 'lights', + 'value': 'light', + }), + 'state': dict({ + 'name': 'state', + 'text': 'on', + 'value': 'on', + }), + }), + 'intent': dict({ + 'name': 'HassGetState', + }), + 'match': True, + 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', + 'slots': dict({ + 'area': 'kitchen', + 'domain': 'lights', + 'state': 'on', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.kitchen': dict({ + 'matched': False, + }), + }), + 'unmatched_slots': dict({ + }), + }), + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_custom_sentence + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'beer_style': dict({ + 'name': 'beer_style', + 'text': 'lager', + 'value': 'lager', + }), + }), + 'file': 'en/beer.yaml', + 'intent': dict({ + 'name': 'OrderBeer', + }), + 'match': True, + 'sentence_template': "I'd like to order a {beer_style} [please]", + 'slots': dict({ + 'beer_style': 'lager', + }), + 'source': 'custom', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_null_result + dict({ + 'results': list([ + None, + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'brightness': dict({ + 'name': 'brightness', + 'text': '100%', + 'value': 100, + }), + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': True, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'brightness': '100%', + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + 'light.demo_1234': dict({ + 'matched': True, + }), + }), + 'unmatched_slots': dict({ + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_out_of_range.1 + dict({ + 'results': list([ + dict({ + 'details': dict({ + 'name': dict({ + 'name': 'name', + 'text': 'test light', + 'value': 'test light', + }), + }), + 'intent': dict({ + 'name': 'HassLightSet', + }), + 'match': False, + 'sentence_template': '[] brightness [to] ', + 'slots': dict({ + 'name': 'test light', + }), + 'source': 'builtin', + 'targets': dict({ + }), + 'unmatched_slots': dict({ + 'brightness': 1001, + }), + }), + ]), + }) +# --- +# name: test_ws_hass_agent_debug_sentence_trigger + dict({ + 'results': list([ + dict({ + 'match': True, + 'sentence_template': 'hello[ world]', + 'source': 'trigger', + }), + ]), + }) +# --- diff --git a/tests/components/conversation/snapshots/test_init.ambr b/tests/components/conversation/snapshots/test_init.ambr index 6264e61863f..0327be064d4 100644 --- a/tests/components/conversation/snapshots/test_init.ambr +++ b/tests/components/conversation/snapshots/test_init.ambr @@ -24,81 +24,6 @@ }), }) # --- -# name: test_custom_sentences - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a stout', - }), - }), - }), - }) -# --- -# name: test_custom_sentences.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': 
dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en-us', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'You ordered a lager', - }), - }), - }), - }) -# --- -# name: test_custom_sentences_config - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Stealth mode engaged', - }), - }), - }), - }) -# --- # name: test_get_agent_info dict({ 'id': 'conversation.home_assistant', @@ -117,918 +42,6 @@ 'name': 'Home Assistant', }) # --- -# name: test_get_agent_list - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'af', - 'ar', - 'bg', - 'bn', - 'ca', - 'cs', - 'da', - 'de', - 'de-CH', - 'el', - 'en', - 'es', - 'et', - 'eu', - 'fa', - 'fi', - 'fr', - 'fr-CA', - 'gl', - 'gu', - 'he', - 'hi', - 'hr', - 'hu', - 'id', - 'is', - 'it', - 'ka', - 'kn', - 'ko', - 'lb', - 'lt', - 'lv', - 'ml', - 'mn', - 'ms', - 'nb', - 'nl', - 'pl', - 'pt', - 'pt-br', - 'ro', - 'ru', - 'sk', - 'sl', - 'sr', - 'sv', - 'sw', - 'te', - 'tr', - 'uk', - 'ur', - 'vi', - 'zh-cn', - 'zh-hk', - 'zh-tw', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.1 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - 'smurfish', - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.2 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.3 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'en', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.4 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de', - 'de-CH', - ]), - }), - dict({ - 'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_get_agent_list.5 - dict({ - 'agents': list([ - dict({ - 'id': 'conversation.home_assistant', - 'name': 'Home Assistant', - 'supported_languages': list([ - 'de-CH', - 'de', - ]), - }), - dict({ - 
'id': 'mock-entry', - 'name': 'Mock Title', - 'supported_languages': list([ - ]), - }), - dict({ - 'id': 'mock-entry-support-all', - 'name': 'Mock Title', - 'supported_languages': '*', - }), - ]), - }) -# --- -# name: test_http_api_handle_failure - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'failed_to_handle', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_api_no_match - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_http_api_unexpected_failure - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'unknown', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'An unexpected error occurred', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[None] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[conversation.home_assistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent[homeassistant] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - 
]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_alias_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added alias', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_conversion_not_expose_new - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_conversion_not_expose_new.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.late', - 'name': 'friendly light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_added_removed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called late added light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 
'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_exposed.5 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'renamed light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': 
dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_entity_renamed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', - }), - }), - }), - }) -# --- -# name: test_http_processing_intent_target_ha_agent - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen', - 'type': 'entity', - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- # name: test_turn_on_intent[None-turn kitchen on-None] dict({ 'conversation_id': None, @@ -1389,361 +402,3 @@ }), }) # --- -# name: test_ws_api[payload0] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload1] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload2] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload3] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload4] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 
'no_intent_match', - }), - 'language': 'test-language', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_api[payload5] - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_intent_match', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': "Sorry, I couldn't understand that", - }), - }), - }), - }) -# --- -# name: test_ws_hass_agent_debug - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on ( | [in ])', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'my cool light', - 'value': 'my cool light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOff', - }), - 'match': True, - 'sentence_template': '[] ( | [in ]) [to] off', - 'slots': dict({ - 'name': 'my cool light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': '', - 'value': 'light', - }), - }), - 'intent': dict({ - 'name': 'HassTurnOn', - }), - 'match': True, - 'sentence_template': ' on [all] in ', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - dict({ - 'details': dict({ - 'area': dict({ - 'name': 'area', - 'text': 'kitchen', - 'value': 'kitchen', - }), - 'domain': dict({ - 'name': 'domain', - 'text': 'lights', - 'value': 'light', - }), - 'state': dict({ - 'name': 'state', - 'text': 'on', - 'value': 'on', - }), - }), - 'intent': dict({ - 'name': 'HassGetState', - }), - 'match': True, - 'sentence_template': '[tell me] how many {on_off_domains:domain} (is|are) {on_off_states:state} [in ]', - 'slots': dict({ - 'area': 'kitchen', - 'domain': 'lights', - 'state': 'on', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.kitchen': dict({ - 'matched': False, - }), - }), - 'unmatched_slots': dict({ - }), - }), - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_custom_sentence - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'beer_style': dict({ - 'name': 'beer_style', - 'text': 'lager', - 'value': 'lager', - }), - }), - 'file': 'en/beer.yaml', - 'intent': dict({ - 'name': 'OrderBeer', - }), - 'match': True, - 'sentence_template': "I'd like to order a {beer_style} [please]", - 'slots': dict({ - 'beer_style': 'lager', - }), - 'source': 'custom', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_null_result - dict({ - 'results': list([ - None, - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'brightness': dict({ - 'name': 'brightness', - 'text': '100%', - 'value': 100, - }), - 'name': dict({ - 
'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': dict({ - 'name': 'HassLightSet', - }), - 'match': True, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'brightness': '100%', - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - 'light.demo_1234': dict({ - 'matched': True, - }), - }), - 'unmatched_slots': dict({ - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_out_of_range.1 - dict({ - 'results': list([ - dict({ - 'details': dict({ - 'name': dict({ - 'name': 'name', - 'text': 'test light', - 'value': 'test light', - }), - }), - 'intent': dict({ - 'name': 'HassLightSet', - }), - 'match': False, - 'sentence_template': '[] brightness [to] ', - 'slots': dict({ - 'name': 'test light', - }), - 'source': 'builtin', - 'targets': dict({ - }), - 'unmatched_slots': dict({ - 'brightness': 1001, - }), - }), - ]), - }) -# --- -# name: test_ws_hass_agent_debug_sentence_trigger - dict({ - 'results': list([ - dict({ - 'match': True, - 'sentence_template': 'hello[ world]', - 'source': 'trigger', - }), - ]), - }) -# --- diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index f8a021475d5..315b73bacfd 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -1,14 +1,20 @@ """Test for the default agent.""" from collections import defaultdict +import os +import tempfile from typing import Any from unittest.mock import AsyncMock, patch from hassil.recognize import Intent, IntentData, MatchEntity, RecognizeResult import pytest +from syrupy import SnapshotAssertion +import yaml from homeassistant.components import conversation, cover, media_player from homeassistant.components.conversation import default_agent +from homeassistant.components.conversation.models import ConversationInput +from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.homeassistant.exposed_entities import ( async_get_assistant_settings, ) @@ -17,21 +23,52 @@ from homeassistant.components.intent import ( TimerInfo, async_register_timer_handler, ) -from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, STATE_CLOSED -from homeassistant.core import DOMAIN as HASS_DOMAIN, Context, HomeAssistant, callback +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + STATE_CLOSED, + STATE_ON, + STATE_UNKNOWN, + EntityCategory, +) +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + Context, + HomeAssistant, + callback, +) from homeassistant.helpers import ( area_registry as ar, device_registry as dr, - entity, entity_registry as er, floor_registry as fr, intent, ) from homeassistant.setup import async_setup_component -from . import expose_entity +from . 
import expose_entity, expose_new -from tests.common import MockConfigEntry, async_mock_service +from tests.common import ( + MockConfigEntry, + MockUser, + async_mock_service, + setup_test_component_platform, +) +from tests.components.light.common import MockLight + + +class OrderBeerIntentHandler(intent.IntentHandler): + """Handle OrderBeer intent.""" + + intent_type = "OrderBeer" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return speech response.""" + beer_style = intent_obj.slots["beer_style"]["value"] + response = intent_obj.create_response() + response.async_set_speech(f"You ordered a {beer_style}") + return response @pytest.fixture @@ -47,8 +84,8 @@ async def init_components(hass: HomeAssistant) -> None: [ {"hidden_by": er.RegistryEntryHider.USER}, {"hidden_by": er.RegistryEntryHider.INTEGRATION}, - {"entity_category": entity.EntityCategory.CONFIG}, - {"entity_category": entity.EntityCategory.DIAGNOSTIC}, + {"entity_category": EntityCategory.CONFIG}, + {"entity_category": EntityCategory.DIAGNOSTIC}, ], ) @pytest.mark.usefixtures("init_components") @@ -61,7 +98,7 @@ async def test_hidden_entities_skipped( "light", "demo", "1234", suggested_object_id="Test light", **er_kwargs ) hass.states.async_set("light.test_light", "off") - calls = async_mock_service(hass, HASS_DOMAIN, "turn_on") + calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, "turn_on") result = await conversation.async_converse( hass, "turn on test light", None, Context(), None ) @@ -173,6 +210,14 @@ async def test_conversation_agent(hass: HomeAssistant) -> None: ): assert agent.supported_languages == ["dwarvish", "elvish", "entish"] + state = hass.states.get(agent.entity_id) + assert state + assert state.state == STATE_UNKNOWN + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + async def test_expose_flag_automatically_set( hass: HomeAssistant, @@ -1350,3 +1395,685 @@ async def test_name_wildcard_lower_priority(hass: HomeAssistant) -> None: assert result.response.response_type == intent.IntentResponseType.ACTION_DONE assert not beer_handler.triggered assert food_handler.triggered + + +async def test_intent_entity_added_removed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities added later. + + We want to ensure that adding an entity later busts the cache + so that the new entity is available as well as any aliases. 
+ """ + context = Context() + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + await hass.async_block_till_done() + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on my cool light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an entity + entity_registry.async_get_or_create( + "light", "demo", "5678", suggested_object_id="late" + ) + hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) + + result = await conversation.async_converse( + hass, "turn on friendly light", None, context + ) + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now add an alias + entity_registry.async_update_entity("light.late", aliases={"late added light"}) + + result = await conversation.async_converse( + hass, "turn on late added light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now delete the entity + hass.states.async_remove("light.late") + + result = await conversation.async_converse( + hass, "turn on late added light", None, context + ) + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_alias_added_removed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with aliases added later. + + We want to ensure that adding an alias later busts the cache + so that the new alias is available. + """ + context = Context() + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Add an alias + entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) + + result = await conversation.async_converse( + hass, "turn on late added alias", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Now remove the alias + entity_registry.async_update_entity("light.kitchen", aliases={}) + + result = await conversation.async_converse( + hass, "turn on late added alias", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_entity_renamed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with entities renamed later. + + We want to ensure that renaming an entity later busts the cache + so that the new name is used. 
+ """ + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + # Rename the entity + entity_registry.async_update_entity("light.kitchen", name="renamed light") + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on renamed light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_intent_entity_remove_custom_name( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that removing a custom name allows targeting the entity by its auto-generated name again.""" + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Should fail with auto-generated name + entity_registry.async_update_entity("light.kitchen", name="renamed light") + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Now clear the custom name + entity_registry.async_update_entity("light.kitchen", name=None) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert len(calls) == 1 + + result = await conversation.async_converse( + hass, "turn on renamed light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + +async def test_intent_entity_fail_if_unexposed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that an entity is not usable if unexposed.""" + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Unexpose the entity + expose_entity(hass, "light.kitchen", False) + await hass.async_block_till_done(wait_background_tasks=True) + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert 
data["response"]["response_type"] == "error" + assert len(calls) == 0 + + +async def test_intent_entity_exposed( + hass: HomeAssistant, + init_components, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API with manual expose. + + We want to ensure that manually exposing an entity later busts the cache + so that the new setting is used. + """ + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Unexpose, then expose the entity + expose_entity(hass, "light.kitchen", False) + await hass.async_block_till_done() + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert len(calls) == 1 + + +async def test_intent_conversion_not_expose_new( + hass: HomeAssistant, + init_components, + hass_admin_user: MockUser, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API when not exposing new entities.""" + # Disable exposing new entities to the default agent + expose_new(hass, False) + + context = Context() + entity = MockLight("kitchen light", STATE_ON) + entity._attr_unique_id = "1234" + entity.entity_id = "light.kitchen" + setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) + + assert await async_setup_component( + hass, + LIGHT_DOMAIN, + {LIGHT_DOMAIN: [{"platform": "test"}]}, + ) + await hass.async_block_till_done() + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "error" + + # Expose the entity + expose_entity(hass, "light.kitchen", True) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "turn on kitchen light", None, context + ) + + assert len(calls) == 1 + data = result.as_dict() + + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + + +async def test_custom_sentences( + hass: HomeAssistant, + init_components, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent.""" + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, OrderBeerIntentHandler()) + + # Don't use "en" to test loading custom sentences with language variants. 
+ language = "en-us" + + # Invoke intent via HTTP API + for beer_style in ("stout", "lager"): + result = await conversation.async_converse( + hass, + f"I'd like to order a {beer_style}, please", + None, + Context(), + language=language, + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert ( + data["response"]["speech"]["plain"]["speech"] + == f"You ordered a {beer_style}" + ) + + +async def test_custom_sentences_config( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test custom sentences with a custom intent in config.""" + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, + ) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "StealthMode": {"speech": {"text": "Stealth mode engaged"}} + } + }, + ) + + # Invoke intent via HTTP API + result = await conversation.async_converse( + hass, "engage stealth mode", None, Context(), None + ) + + data = result.as_dict() + assert data == snapshot + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" + + +async def test_language_region(hass: HomeAssistant, init_components) -> None: + """Test regional languages.""" + hass.states.async_set("light.kitchen", "off") + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + # Add fake region + language = f"{hass.config.language}-YZ" + await hass.services.async_call( + "conversation", + "process", + { + conversation.ATTR_TEXT: "turn on the kitchen", + conversation.ATTR_LANGUAGE: language, + }, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.kitchen"]} + + +async def test_non_default_response(hass: HomeAssistant, init_components) -> None: + """Test intent response that is not the default.""" + hass.states.async_set("cover.front_door", "closed") + calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) + + agent = default_agent.async_get_default_agent(hass) + assert isinstance(agent, default_agent.DefaultAgent) + + result = await agent.async_process( + ConversationInput( + text="open the front door", + context=Context(), + conversation_id=None, + device_id=None, + language=hass.config.language, + agent_id=None, + ) + ) + assert len(calls) == 1 + assert result.response.speech["plain"]["speech"] == "Opened" + + +async def test_turn_on_area( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, area_id=kitchen_area.id) + + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="stove" + ) + entity_registry.async_update_entity( + "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id + ) + hass.states.async_set("light.stove", 
"off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + basement_area = area_registry.async_create("basement") + device_registry.async_update_device(device.id, area_id=basement_area.id) + entity_registry.async_update_entity("light.stove", area_id=basement_area.id) + calls.clear() + + # Test that the area is updated + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 0 + + # Test the new area works + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on lights in the basement"}, + ) + await hass.async_block_till_done() + + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": ["light.stove"]} + + +async def test_light_area_same_name( + hass: HomeAssistant, + init_components, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test turning on a light with the same name as an area.""" + entry = MockConfigEntry(domain="test") + entry.add_to_hass(hass) + + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + kitchen_area = area_registry.async_create("kitchen") + device_registry.async_update_device(device.id, area_id=kitchen_area.id) + + kitchen_light = entity_registry.async_get_or_create( + "light", "demo", "1234", original_name="kitchen light" + ) + entity_registry.async_update_entity( + kitchen_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} + ) + + ceiling_light = entity_registry.async_get_or_create( + "light", "demo", "5678", original_name="ceiling light" + ) + entity_registry.async_update_entity( + ceiling_light.entity_id, area_id=kitchen_area.id + ) + hass.states.async_set( + ceiling_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "ceiling light"} + ) + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + + await hass.services.async_call( + "conversation", + "process", + {conversation.ATTR_TEXT: "turn on kitchen light"}, + ) + await hass.async_block_till_done() + + # Should only turn on one light instead of all lights in the kitchen + assert len(calls) == 1 + call = calls[0] + assert call.domain == LIGHT_DOMAIN + assert call.service == "turn_on" + assert call.data == {"entity_id": [kitchen_light.entity_id]} + + +async def test_custom_sentences_priority( + hass: HomeAssistant, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from custom_sentences have priority over builtin intents/sentences.""" + with tempfile.NamedTemporaryFile( + mode="w+", + encoding="utf-8", + suffix=".yaml", + dir=os.path.join(hass.config.config_dir, "custom_sentences", "en"), + ) as custom_sentences_file: + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. 
+ yaml.dump( + { + "language": "en", + "intents": { + "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]} + }, + }, + custom_sentences_file, + ) + custom_sentences_file.flush() + custom_sentences_file.seek(0) + + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "conversation", {}) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "CustomIntent": {"speech": {"text": "custom response"}} + } + }, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. + hass.states.async_set("light.lamp", "off") + + result = await conversation.async_converse( + hass, + "turn on the lamp", + None, + Context(), + language=hass.config.language, + ) + + data = result.as_dict() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" + + +async def test_config_sentences_priority( + hass: HomeAssistant, + hass_admin_user: MockUser, + snapshot: SnapshotAssertion, +) -> None: + """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" + # Add a custom sentence that would match a builtin sentence. + # Custom sentences have priority. + assert await async_setup_component(hass, "homeassistant", {}) + assert await async_setup_component(hass, "intent", {}) + assert await async_setup_component( + hass, + "conversation", + {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, + ) + assert await async_setup_component(hass, "light", {}) + assert await async_setup_component( + hass, + "intent_script", + {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, + ) + + # Ensure that a "lamp" exists so that we can verify the custom intent + # overrides the builtin sentence. 
+ hass.states.async_set("light.lamp", "off") + + result = await conversation.async_converse( + hass, + "turn on the lamp", + None, + Context(), + language=hass.config.language, + ) + data = result.as_dict() + assert data["response"]["response_type"] == "action_done" + assert data["response"]["speech"]["plain"]["speech"] == "custom response" diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index b1c4a6d51af..7bae9c43f70 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -1,7 +1,9 @@ """Test intents for the default agent.""" +from datetime import datetime from unittest.mock import patch +from freezegun import freeze_time import pytest from homeassistant.components import ( @@ -121,6 +123,34 @@ async def test_cover_set_position( assert call.data == {"entity_id": entity_id, cover.ATTR_POSITION: 50} +async def test_cover_device_class( + hass: HomeAssistant, + init_components, +) -> None: + """Test the open position for covers by device class.""" + await cover_intent.async_setup_intents(hass) + + entity_id = f"{cover.DOMAIN}.front" + hass.states.async_set( + entity_id, STATE_CLOSED, attributes={"device_class": "garage"} + ) + async_expose_entity(hass, conversation.DOMAIN, entity_id, True) + + # Open service + calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) + result = await conversation.async_converse( + hass, "open the garage door", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "Opened the garage" + assert len(calls) == 1 + call = calls[0] + assert call.data == {"entity_id": entity_id} + + async def test_valve_intents( hass: HomeAssistant, init_components, @@ -413,3 +443,28 @@ async def test_todo_add_item_fr( assert mock_handle.call_args.args intent_obj = mock_handle.call_args.args[0] assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine" + + +@freeze_time(datetime(year=2013, month=9, day=17, hour=1, minute=2)) +async def test_date_time( + hass: HomeAssistant, + init_components, +) -> None: + """Test the date and time intents.""" + result = await conversation.async_converse( + hass, "what is the date", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "September 17th, 2013" + + result = await conversation.async_converse( + hass, "what time is it", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "1:02 AM" diff --git a/tests/components/conversation/test_http.py b/tests/components/conversation/test_http.py new file mode 100644 index 00000000000..1431fd6c17b --- /dev/null +++ b/tests/components/conversation/test_http.py @@ -0,0 +1,524 @@ +"""The tests for the HTTP API of the Conversation component.""" + +from http import HTTPStatus +from typing import Any +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.conversation import default_agent +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN +from 
homeassistant.const import ATTR_FRIENDLY_NAME +from homeassistant.core import HomeAssistant +from homeassistant.helpers import area_registry as ar, entity_registry as er, intent +from homeassistant.setup import async_setup_component + +from . import MockAgent + +from tests.common import async_mock_service +from tests.typing import ClientSessionGenerator, WebSocketGenerator + +AGENT_ID_OPTIONS = [ + None, + # Old value of conversation.HOME_ASSISTANT_AGENT, + "homeassistant", + # Current value of conversation.HOME_ASSISTANT_AGENT, + "conversation.home_assistant", +] + + +class OrderBeerIntentHandler(intent.IntentHandler): + """Handle OrderBeer intent.""" + + intent_type = "OrderBeer" + + async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: + """Return speech response.""" + beer_style = intent_obj.slots["beer_style"]["value"] + response = intent_obj.create_response() + response.async_set_speech(f"You ordered a {beer_style}") + return response + + +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_http_processing_intent( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + agent_id, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test processing intent via HTTP API.""" + # Add an alias + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) + hass.states.async_set("light.kitchen", "off") + + calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + client = await hass_client() + data: dict[str, Any] = {"text": "turn on my cool light"} + if agent_id: + data["agent_id"] = agent_id + resp = await client.post("/api/conversation/process", json=data) + + assert resp.status == HTTPStatus.OK + assert len(calls) == 1 + data = await resp.json() + + assert data == snapshot + + +async def test_http_api_no_match( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an intent match failure.""" + client = await hass_client() + + # Shouldn't match any intents + resp = await client.post("/api/conversation/process", json={"text": "do something"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "no_intent_match" + + +async def test_http_api_handle_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the HTTP conversation API with an error during handling.""" + client = await hass_client() + + hass.states.async_set("light.kitchen", "off") + + # Raise an error during intent handling + def async_handle_error(*args, **kwargs): + raise intent.IntentHandleError + + with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): + resp = await client.post( + "/api/conversation/process", json={"text": "turn on the kitchen"} + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data == snapshot + assert data["response"]["response_type"] == "error" + assert data["response"]["data"]["code"] == "failed_to_handle" + + +async def test_http_api_unexpected_failure( + hass: HomeAssistant, + init_components, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the 
HTTP conversation API with an unexpected error during handling."""
+    client = await hass_client()
+
+    hass.states.async_set("light.kitchen", "off")
+
+    # Raise an "unexpected" error during intent handling
+    def async_handle_error(*args, **kwargs):
+        raise intent.IntentUnexpectedError
+
+    with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error):
+        resp = await client.post(
+            "/api/conversation/process", json={"text": "turn on the kitchen"}
+        )
+
+    assert resp.status == HTTPStatus.OK
+    data = await resp.json()
+
+    assert data == snapshot
+    assert data["response"]["response_type"] == "error"
+    assert data["response"]["data"]["code"] == "unknown"
+
+
+async def test_http_api_wrong_data(
+    hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator
+) -> None:
+    """Test the HTTP conversation API."""
+    client = await hass_client()
+
+    resp = await client.post("/api/conversation/process", json={"text": 123})
+    assert resp.status == HTTPStatus.BAD_REQUEST
+
+    resp = await client.post("/api/conversation/process", json={})
+    assert resp.status == HTTPStatus.BAD_REQUEST
+
+
+@pytest.mark.parametrize(
+    "payload",
+    [
+        {
+            "text": "Test Text",
+        },
+        {
+            "text": "Test Text",
+            "language": "test-language",
+        },
+        {
+            "text": "Test Text",
+            "conversation_id": "test-conv-id",
+        },
+        {
+            "text": "Test Text",
+            "conversation_id": None,
+        },
+        {
+            "text": "Test Text",
+            "conversation_id": "test-conv-id",
+            "language": "test-language",
+        },
+        {
+            "text": "Test Text",
+            "agent_id": "homeassistant",
+        },
+    ],
+)
+async def test_ws_api(
+    hass: HomeAssistant,
+    init_components,
+    hass_ws_client: WebSocketGenerator,
+    payload,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the Websocket conversation API."""
+    client = await hass_ws_client(hass)
+
+    await client.send_json_auto_id({"type": "conversation/process", **payload})
+
+    msg = await client.receive_json()
+
+    assert msg["success"]
+    assert msg["result"] == snapshot
+    assert msg["result"]["response"]["data"]["code"] == "no_intent_match"
+
+
+@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS)
+async def test_ws_prepare(
+    hass: HomeAssistant, init_components, hass_ws_client: WebSocketGenerator, agent_id
+) -> None:
+    """Test the Websocket prepare conversation API."""
+    agent = default_agent.async_get_default_agent(hass)
+    assert isinstance(agent, default_agent.DefaultAgent)
+
+    # No intents should be loaded yet
+    assert not agent._lang_intents.get(hass.config.language)
+
+    client = await hass_ws_client(hass)
+
+    msg = {"type": "conversation/prepare"}
+    if agent_id is not None:
+        msg["agent_id"] = agent_id
+    await client.send_json_auto_id(msg)
+
+    msg = await client.receive_json()
+
+    assert msg["success"]
+
+    # Intents should now be loaded
+    assert agent._lang_intents.get(hass.config.language)
+
+
+async def test_get_agent_list(
+    hass: HomeAssistant,
+    init_components,
+    mock_conversation_agent: MockAgent,
+    mock_agent_support_all: MockAgent,
+    hass_ws_client: WebSocketGenerator,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test getting agent info."""
+    client = await hass_ws_client(hass)
+
+    await client.send_json_auto_id({"type": "conversation/agent/list"})
+    msg = await client.receive_json()
+    assert msg["type"] == "result"
+    assert msg["success"]
+    assert msg["result"] == snapshot
+
+    await client.send_json_auto_id(
+        {"type": "conversation/agent/list", "language": "smurfish"}
+    )
+    msg = await client.receive_json()
+    assert msg["type"] == "result"
+    assert msg["success"]
+    assert msg["result"] == snapshot
+ + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "en-UK"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + await client.send_json_auto_id( + {"type": "conversation/agent/list", "language": "de", "country": "ch"} + ) + msg = await client.receive_json() + assert msg["type"] == "result" + assert msg["success"] + assert msg["result"] == snapshot + + +async def test_ws_hass_agent_debug( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command.""" + client = await hass_ws_client(hass) + + kitchen_area = area_registry.async_create("kitchen") + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity( + "light.kitchen", + aliases={"my cool light"}, + area_id=kitchen_area.id, + ) + await hass.async_block_till_done() + hass.states.async_set("light.kitchen", "off") + + on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") + + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "turn on my cool light", + "turn my cool light off", + "turn on all lights in the kitchen", + "how many lights are on in the kitchen?", + "this will not match anything", # None in results + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + # Last sentence should be a failed match + assert msg["result"]["results"][-1] is None + + # Light state should not have been changed + assert len(on_calls) == 0 + assert len(off_calls) == 0 + + +async def test_ws_hass_agent_debug_null_result( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a null result.""" + client = await hass_ws_client(hass) + + async def async_recognize(self, user_input, *args, **kwargs): + if user_input.text == "bad sentence": + return None + + return await self.async_recognize(user_input, *args, **kwargs) + + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", + async_recognize, + ): + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "bad sentence", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + assert msg["result"]["results"] == [None] + + +async def test_ws_hass_agent_debug_out_of_range( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with an out of range entity.""" + test_light = 
entity_registry.async_get_or_create("light", "demo", "1234") + hass.states.async_set( + test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} + ) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 100%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert results[0]["match"] + + # Brightness is out of range + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "set test light brightness to 1001%", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + results = msg["result"]["results"] + assert len(results) == 1 + assert not results[0]["match"] + + # Name matched, but brightness didn't + assert results[0]["slots"] == {"name": "test light"} + assert results[0]["unmatched_slots"] == {"brightness": 1001} + + +async def test_ws_hass_agent_debug_custom_sentence( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test homeassistant agent debug websocket command with a custom sentence.""" + # Expecting testing_config/custom_sentences/en/beer.yaml + intent.async_register(hass, OrderBeerIntentHandler()) + + client = await hass_ws_client(hass) + + # Brightness is in range (0-100) + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": [ + "I'd like to order a lager, please.", + ], + } + ) + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "custom" + assert debug_results[0].get("file") == "en/beer.yaml" + + +async def test_ws_hass_agent_debug_sentence_trigger( + hass: HomeAssistant, + init_components, + hass_ws_client: WebSocketGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test homeassistant agent debug websocket command with a sentence trigger.""" + calls = async_mock_service(hass, "test", "automation") + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": { + "platform": "conversation", + "command": ["hello", "hello[ world]"], + }, + "action": { + "service": "test.automation", + "data_template": {"data": "{{ trigger }}"}, + }, + } + }, + ) + + client = await hass_ws_client(hass) + + # Use trigger sentence + await client.send_json_auto_id( + { + "type": "conversation/agent/homeassistant/debug", + "sentences": ["hello world"], + } + ) + await hass.async_block_till_done() + + msg = await client.receive_json() + + assert msg["success"] + assert msg["result"] == snapshot + + debug_results = msg["result"].get("results", []) + assert len(debug_results) == 1 + assert debug_results[0].get("match") + assert debug_results[0].get("source") == "trigger" + assert debug_results[0].get("sentence_template") == "hello[ world]" + + # Trigger should not have been executed + assert len(calls) == 0 diff --git a/tests/components/conversation/test_init.py b/tests/components/conversation/test_init.py index dc940dba81b..34a8fce636d 100644 --- 
a/tests/components/conversation/test_init.py +++ b/tests/components/conversation/test_init.py @@ -1,42 +1,24 @@ """The tests for the Conversation component.""" from http import HTTPStatus -import os -import tempfile -from typing import Any from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol -import yaml from homeassistant.components import conversation from homeassistant.components.conversation import default_agent -from homeassistant.components.conversation.models import ConversationInput -from homeassistant.components.cover import SERVICE_OPEN_COVER from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_ON -from homeassistant.core import Context, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import ( - area_registry as ar, - device_registry as dr, - entity_registry as er, - intent, -) +from homeassistant.helpers import intent from homeassistant.setup import async_setup_component -from . import MockAgent, expose_entity, expose_new +from . import MockAgent -from tests.common import ( - MockConfigEntry, - MockUser, - async_mock_service, - setup_test_component_platform, -) -from tests.components.light.common import MockLight -from tests.typing import ClientSessionGenerator, WebSocketGenerator +from tests.common import MockUser, async_mock_service +from tests.typing import ClientSessionGenerator AGENT_ID_OPTIONS = [ None, @@ -47,460 +29,6 @@ AGENT_ID_OPTIONS = [ ] -class OrderBeerIntentHandler(intent.IntentHandler): - """Handle OrderBeer intent.""" - - intent_type = "OrderBeer" - - async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: - """Return speech response.""" - beer_style = intent_obj.slots["beer_style"]["value"] - response = intent_obj.create_response() - response.async_set_speech(f"You ordered a {beer_style}") - return response - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_http_processing_intent( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - agent_id, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API.""" - # Add an alias - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - data: dict[str, Any] = {"text": "turn on my cool light"} - if agent_id: - data["agent_id"] = agent_id - resp = await client.post("/api/conversation/process", json=data) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - - -async def test_http_processing_intent_target_ha_agent( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - mock_conversation_agent: MockAgent, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent can be processed via HTTP API with picking agent.""" - # Add an alias - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", 
aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={"text": "turn on my cool light", "agent_id": "homeassistant"}, - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_http_processing_intent_entity_added_removed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities added later. - - We want to ensure that adding an entity later busts the cache - so that the new entity is available as well as any aliases. - """ - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - hass.states.async_set("light.kitchen", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an entity - entity_registry.async_get_or_create( - "light", "demo", "5678", suggested_object_id="late" - ) - hass.states.async_set("light.late", "off", {"friendly_name": "friendly light"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on friendly light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now add an alias - entity_registry.async_update_entity("light.late", aliases={"late added light"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now delete the entity - hass.states.async_remove("light.late") - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_alias_added_removed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with aliases added later. - - We want to ensure that adding an alias later busts the cache - so that the new alias is available. 
- """ - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"}) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Add an alias - entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added alias"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Now remove the alieas - entity_registry.async_update_entity("light.kitchen", aliases={}) - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on late added alias"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_entity_renamed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with entities renamed later. - - We want to ensure that renaming an entity later busts the cache - so that the new name is used. - """ - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Rename the entity - entity_registry.async_update_entity("light.kitchen", name="renamed light") - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on renamed light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Now clear the custom name - entity_registry.async_update_entity("light.kitchen", name=None) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == 
"action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on renamed light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - -async def test_http_processing_intent_entity_exposed( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API with manual expose. - - We want to ensure that manually exposing an entity later busts the cache - so that the new setting is used. - """ - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - entity_registry.async_update_entity("light.kitchen", aliases={"my cool light"}) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - # Unexpose the entity - expose_entity(hass, "light.kitchen", False) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Now expose the entity - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", json={"text": "turn on my cool light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - -async def test_http_processing_intent_conversion_not_expose_new( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, -) -> None: - """Test processing intent via HTTP API when 
not exposing new entities.""" - # Disable exposing new entities to the default agent - expose_new(hass, False) - - entity = MockLight("kitchen light", STATE_ON) - entity._attr_unique_id = "1234" - entity.entity_id = "light.kitchen" - setup_test_component_platform(hass, LIGHT_DOMAIN, [entity]) - - assert await async_setup_component( - hass, - LIGHT_DOMAIN, - {LIGHT_DOMAIN: [{"platform": "test"}]}, - ) - await hass.async_block_till_done() - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - client = await hass_client() - - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "error" - - # Expose the entity - expose_entity(hass, "light.kitchen", True) - await hass.async_block_till_done() - - resp = await client.post( - "/api/conversation/process", json={"text": "turn on kitchen light"} - ) - - assert resp.status == HTTPStatus.OK - assert len(calls) == 1 - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - - @pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) @pytest.mark.parametrize("sentence", ["turn on kitchen", "turn kitchen on"]) @pytest.mark.parametrize("conversation_id", ["my_new_conversation", None]) @@ -573,95 +101,7 @@ async def test_turn_off_intent(hass: HomeAssistant, init_components, sentence) - assert call.data == {"entity_id": ["light.kitchen"]} -async def test_http_api_no_match( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an intent match failure.""" - client = await hass_client() - - # Shouldn't match any intents - resp = await client.post("/api/conversation/process", json={"text": "do something"}) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "no_intent_match" - - -async def test_http_api_handle_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an error during intent handling - def async_handle_error(*args, **kwargs): - raise intent.IntentHandleError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "failed_to_handle" - - -async def test_http_api_unexpected_failure( - hass: HomeAssistant, - init_components, - hass_client: ClientSessionGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test the HTTP conversation API with an unexpected error during handling.""" - client = await hass_client() - - hass.states.async_set("light.kitchen", "off") - - # Raise an "unexpected" error during intent handling - def async_handle_error(*args, **kwargs): - raise intent.IntentUnexpectedError - - with patch("homeassistant.helpers.intent.async_handle", new=async_handle_error): - resp = await 
client.post( - "/api/conversation/process", json={"text": "turn on the kitchen"} - ) - - assert resp.status == HTTPStatus.OK - data = await resp.json() - - assert data == snapshot - assert data["response"]["response_type"] == "error" - assert data["response"]["data"]["code"] == "unknown" - - -async def test_http_api_wrong_data( - hass: HomeAssistant, init_components, hass_client: ClientSessionGenerator -) -> None: - """Test the HTTP conversation API.""" - client = await hass_client() - - resp = await client.post("/api/conversation/process", json={"text": 123}) - assert resp.status == HTTPStatus.BAD_REQUEST - - resp = await client.post("/api/conversation/process", json={}) - assert resp.status == HTTPStatus.BAD_REQUEST - - +@pytest.mark.usefixtures("init_components") async def test_custom_agent( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -670,10 +110,6 @@ async def test_custom_agent( snapshot: SnapshotAssertion, ) -> None: """Test a custom conversation agent.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert await async_setup_component(hass, "intent", {}) - client = await hass_client() data = { @@ -702,162 +138,9 @@ async def test_custom_agent( ) -@pytest.mark.parametrize( - "payload", - [ - { - "text": "Test Text", - }, - { - "text": "Test Text", - "language": "test-language", - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - }, - { - "text": "Test Text", - "conversation_id": None, - }, - { - "text": "Test Text", - "conversation_id": "test-conv-id", - "language": "test-language", - }, - { - "text": "Test Text", - "agent_id": "homeassistant", - }, - ], -) -async def test_ws_api( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - payload, - snapshot: SnapshotAssertion, -) -> None: - """Test the Websocket conversation API.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/process", **payload}) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["response"]["data"]["code"] == "no_intent_match" - - -@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) -async def test_ws_prepare( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, agent_id -) -> None: - """Test the Websocket prepare conversation API.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - agent = default_agent.async_get_default_agent(hass) - assert isinstance(agent, default_agent.DefaultAgent) - - # No intents should be loaded yet - assert not agent._lang_intents.get(hass.config.language) - - client = await hass_ws_client(hass) - - msg = {"type": "conversation/prepare"} - if agent_id is not None: - msg["agent_id"] = agent_id - await client.send_json_auto_id(msg) - - msg = await client.receive_json() - - assert msg["success"] - - # Intents should now be load - assert agent._lang_intents.get(hass.config.language) - - -async def test_custom_sentences( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert 
await async_setup_component(hass, "intent", {}) - - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - # Don't use "en" to test loading custom sentences with language variants. - language = "en-us" - - # Invoke intent via HTTP API - client = await hass_client() - for beer_style in ("stout", "lager"): - resp = await client.post( - "/api/conversation/process", - json={ - "text": f"I'd like to order a {beer_style}, please", - "language": language, - }, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert ( - data["response"]["speech"]["plain"]["speech"] - == f"You ordered a {beer_style}" - ) - - -async def test_custom_sentences_config( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test custom sentences with a custom intent in config.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component( - hass, - "conversation", - {"conversation": {"intents": {"StealthMode": ["engage stealth mode"]}}}, - ) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "StealthMode": {"speech": {"text": "Stealth mode engaged"}} - } - }, - ) - - # Invoke intent via HTTP API - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={"text": "engage stealth mode"}, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data == snapshot - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "Stealth mode engaged" - - -async def test_prepare_reload(hass: HomeAssistant) -> None: +async def test_prepare_reload(hass: HomeAssistant, init_components) -> None: """Test calling the reload service.""" language = hass.config.language - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) # Load intents agent = default_agent.async_get_default_agent(hass) @@ -893,181 +176,7 @@ async def test_prepare_fail(hass: HomeAssistant) -> None: await agent.async_prepare("not-a-language") # Confirm no intents were loaded - assert not agent._lang_intents.get("not-a-language") - - -async def test_language_region(hass: HomeAssistant, init_components) -> None: - """Test calling the turn on intent.""" - hass.states.async_set("light.kitchen", "off") - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - # Add fake region - language = f"{hass.config.language}-YZ" - await hass.services.async_call( - "conversation", - "process", - { - conversation.ATTR_TEXT: "turn on the kitchen", - conversation.ATTR_LANGUAGE: language, - }, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.kitchen"]} - - -async def test_non_default_response(hass: HomeAssistant, init_components) -> None: - """Test intent response that is not the default.""" - hass.states.async_set("cover.front_door", "closed") - calls = async_mock_service(hass, "cover", SERVICE_OPEN_COVER) - - agent = default_agent.async_get_default_agent(hass) - assert isinstance(agent, default_agent.DefaultAgent) - - result = await agent.async_process( - 
ConversationInput( - text="open the front door", - context=Context(), - conversation_id=None, - device_id=None, - language=hass.config.language, - agent_id=None, - ) - ) - assert len(calls) == 1 - assert result.response.speech["plain"]["speech"] == "Opened" - - -async def test_turn_on_area( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="stove" - ) - entity_registry.async_update_entity( - "light.stove", aliases={"my stove light"}, area_id=kitchen_area.id - ) - hass.states.async_set("light.stove", "off") - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - basement_area = area_registry.async_create("basement") - device_registry.async_update_device(device.id, area_id=basement_area.id) - entity_registry.async_update_entity("light.stove", area_id=basement_area.id) - calls.clear() - - # Test that the area is updated - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the kitchen"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 0 - - # Test the new area works - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on lights in the basement"}, - ) - await hass.async_block_till_done() - - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": ["light.stove"]} - - -async def test_light_area_same_name( - hass: HomeAssistant, - init_components, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, -) -> None: - """Test turning on a light with the same name as an area.""" - entry = MockConfigEntry(domain="test") - entry.add_to_hass(hass) - - device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, - ) - - kitchen_area = area_registry.async_create("kitchen") - device_registry.async_update_device(device.id, area_id=kitchen_area.id) - - kitchen_light = entity_registry.async_get_or_create( - "light", "demo", "1234", original_name="kitchen light" - ) - entity_registry.async_update_entity( - kitchen_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - kitchen_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} - ) - - ceiling_light = entity_registry.async_get_or_create( - "light", "demo", "5678", original_name="ceiling light" - ) - entity_registry.async_update_entity( - ceiling_light.entity_id, area_id=kitchen_area.id - ) - hass.states.async_set( - ceiling_light.entity_id, "off", 
attributes={ATTR_FRIENDLY_NAME: "ceiling light"} - ) - - calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - - await hass.services.async_call( - "conversation", - "process", - {conversation.ATTR_TEXT: "turn on kitchen light"}, - ) - await hass.async_block_till_done() - - # Should only turn on one light instead of all lights in the kitchen - assert len(calls) == 1 - call = calls[0] - assert call.domain == LIGHT_DOMAIN - assert call.service == "turn_on" - assert call.data == {"entity_id": [kitchen_light.entity_id]} + assert agent._lang_intents.get("not-a-language") is default_agent.ERROR_SENTINEL async def test_agent_id_validator_invalid_agent( @@ -1081,64 +190,6 @@ async def test_agent_id_validator_invalid_agent( conversation.agent_id_validator("conversation.home_assistant") -async def test_get_agent_list( - hass: HomeAssistant, - init_components, - mock_conversation_agent: MockAgent, - mock_agent_support_all: MockAgent, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test getting agent info.""" - client = await hass_ws_client(hass) - - await client.send_json_auto_id({"type": "conversation/agent/list"}) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "smurfish"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "en-UK"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - await client.send_json_auto_id( - {"type": "conversation/agent/list", "language": "de", "country": "ch"} - ) - msg = await client.receive_json() - assert msg["type"] == "result" - assert msg["success"] - assert msg["result"] == snapshot - - async def test_get_agent_info( hass: HomeAssistant, init_components, @@ -1164,331 +215,16 @@ async def test_get_agent_info( assert agent_info == snapshot -async def test_ws_hass_agent_debug( +@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS) +async def test_prepare_agent( hass: HomeAssistant, init_components, - hass_ws_client: WebSocketGenerator, - area_registry: ar.AreaRegistry, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, + agent_id: str, ) -> None: - """Test homeassistant agent debug websocket command.""" - client = await hass_ws_client(hass) - - kitchen_area = area_registry.async_create("kitchen") - entity_registry.async_get_or_create( - "light", "demo", "1234", suggested_object_id="kitchen" - ) - entity_registry.async_update_entity( - "light.kitchen", - aliases={"my cool light"}, - area_id=kitchen_area.id, - ) - hass.states.async_set("light.kitchen", "off") - - on_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") - off_calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") - - await client.send_json_auto_id( - { - "type": 
"conversation/agent/homeassistant/debug", - "sentences": [ - "turn on my cool light", - "turn my cool light off", - "turn on all lights in the kitchen", - "how many lights are on in the kitchen?", - "this will not match anything", # None in results - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - # Last sentence should be a failed match - assert msg["result"]["results"][-1] is None - - # Light state should not have been changed - assert len(on_calls) == 0 - assert len(off_calls) == 0 - - -async def test_ws_hass_agent_debug_null_result( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command with a null result.""" - client = await hass_ws_client(hass) - - async def async_recognize(self, user_input, *args, **kwargs): - if user_input.text == "bad sentence": - return None - - return await self.async_recognize(user_input, *args, **kwargs) - + """Test prepare agent.""" with patch( - "homeassistant.components.conversation.default_agent.DefaultAgent.async_recognize", - async_recognize, - ): - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "bad sentence", - ], - } - ) + "homeassistant.components.conversation.default_agent.DefaultAgent.async_prepare" + ) as mock_prepare: + await conversation.async_prepare_agent(hass, agent_id, "en") - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - assert msg["result"]["results"] == [None] - - -async def test_ws_hass_agent_debug_out_of_range( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with an out of range entity.""" - test_light = entity_registry.async_get_or_create("light", "demo", "1234") - hass.states.async_set( - test_light.entity_id, "off", attributes={ATTR_FRIENDLY_NAME: "test light"} - ) - - client = await hass_ws_client(hass) - - # Brightness is in range (0-100) - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 100%", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert results[0]["match"] - - # Brightness is out of range - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "set test light brightness to 1001%", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - results = msg["result"]["results"] - assert len(results) == 1 - assert not results[0]["match"] - - # Name matched, but brightness didn't - assert results[0]["slots"] == {"name": "test light"} - assert results[0]["unmatched_slots"] == {"brightness": 1001} - - -async def test_ws_hass_agent_debug_custom_sentence( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, - entity_registry: er.EntityRegistry, -) -> None: - """Test homeassistant agent debug websocket command with a custom sentence.""" - # Expecting testing_config/custom_sentences/en/beer.yaml - intent.async_register(hass, OrderBeerIntentHandler()) - - client = await hass_ws_client(hass) - - 
# Brightness is in range (0-100) - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": [ - "I'd like to order a lager, please.", - ], - } - ) - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "custom" - assert debug_results[0].get("file") == "en/beer.yaml" - - -async def test_ws_hass_agent_debug_sentence_trigger( - hass: HomeAssistant, - init_components, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test homeassistant agent debug websocket command with a sentence trigger.""" - calls = async_mock_service(hass, "test", "automation") - assert await async_setup_component( - hass, - "automation", - { - "automation": { - "trigger": { - "platform": "conversation", - "command": ["hello", "hello[ world]"], - }, - "action": { - "service": "test.automation", - "data_template": {"data": "{{ trigger }}"}, - }, - } - }, - ) - - client = await hass_ws_client(hass) - - # Use trigger sentence - await client.send_json_auto_id( - { - "type": "conversation/agent/homeassistant/debug", - "sentences": ["hello world"], - } - ) - await hass.async_block_till_done() - - msg = await client.receive_json() - - assert msg["success"] - assert msg["result"] == snapshot - - debug_results = msg["result"].get("results", []) - assert len(debug_results) == 1 - assert debug_results[0].get("match") - assert debug_results[0].get("source") == "trigger" - assert debug_results[0].get("sentence_template") == "hello[ world]" - - # Trigger should not have been executed - assert len(calls) == 0 - - -async def test_custom_sentences_priority( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test that user intents from custom_sentences have priority over builtin intents/sentences.""" - with tempfile.NamedTemporaryFile( - mode="w+", - encoding="utf-8", - suffix=".yaml", - dir=os.path.join(hass.config.config_dir, "custom_sentences", "en"), - ) as custom_sentences_file: - # Add a custom sentence that would match a builtin sentence. - # Custom sentences have priority. - yaml.dump( - { - "language": "en", - "intents": { - "CustomIntent": {"data": [{"sentences": ["turn on the lamp"]}]} - }, - }, - custom_sentences_file, - ) - custom_sentences_file.flush() - custom_sentences_file.seek(0) - - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) - assert await async_setup_component(hass, "light", {}) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "intent_script", - { - "intent_script": { - "CustomIntent": {"speech": {"text": "custom response"}} - } - }, - ) - - # Ensure that a "lamp" exists so that we can verify the custom intent - # overrides the builtin sentence. 
- hass.states.async_set("light.lamp", "off") - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={ - "text": "turn on the lamp", - "language": hass.config.language, - }, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "custom response" - - -async def test_config_sentences_priority( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_admin_user: MockUser, - snapshot: SnapshotAssertion, -) -> None: - """Test that user intents from configuration.yaml have priority over builtin intents/sentences.""" - # Add a custom sentence that would match a builtin sentence. - # Custom sentences have priority. - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "intent", {}) - assert await async_setup_component( - hass, - "conversation", - {"conversation": {"intents": {"CustomIntent": ["turn on the lamp"]}}}, - ) - assert await async_setup_component(hass, "light", {}) - assert await async_setup_component( - hass, - "intent_script", - {"intent_script": {"CustomIntent": {"speech": {"text": "custom response"}}}}, - ) - - # Ensure that a "lamp" exists so that we can verify the custom intent - # overrides the builtin sentence. - hass.states.async_set("light.lamp", "off") - - client = await hass_client() - resp = await client.post( - "/api/conversation/process", - json={ - "text": "turn on the lamp", - "language": hass.config.language, - }, - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - assert data["response"]["response_type"] == "action_done" - assert data["response"]["speech"]["plain"]["speech"] == "custom response" + assert len(mock_prepare.mock_calls) == 1 diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index c586eb8865d..59cd10d2510 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -33,7 +33,7 @@ async def test_converation_trace( assert traces last_trace = traces[-1].as_dict() assert last_trace.get("events") - assert len(last_trace.get("events")) == 1 + assert len(last_trace.get("events")) == 2 trace_event = last_trace["events"][0] assert ( trace_event.get("event_type") == trace.ConversationTraceEventType.ASYNC_PROCESS @@ -50,6 +50,16 @@ async def test_converation_trace( == "Added apples" ) + trace_event = last_trace["events"][1] + assert trace_event.get("event_type") == trace.ConversationTraceEventType.TOOL_CALL + assert trace_event.get("data") == { + "intent_name": "HassListAddItem", + "slots": { + "name": "Shopping List", + "item": "apples ", + }, + } + async def test_converation_trace_error( hass: HomeAssistant, diff --git a/tests/components/conversation/test_trigger.py b/tests/components/conversation/test_trigger.py index c5d4382e917..3c3e58e7136 100644 --- a/tests/components/conversation/test_trigger.py +++ b/tests/components/conversation/test_trigger.py @@ -11,16 +11,9 @@ from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import trigger from homeassistant.setup import async_setup_component -from tests.common import async_mock_service from tests.typing import WebSocketGenerator -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - 
@pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" @@ -29,7 +22,7 @@ async def setup_comp(hass: HomeAssistant) -> None: async def test_if_fires_on_event( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" assert await async_setup_component( @@ -62,8 +55,10 @@ async def test_if_fires_on_event( assert service_response["response"]["speech"]["plain"]["speech"] == "Done" await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -75,7 +70,7 @@ async def test_if_fires_on_event( } -async def test_response(hass: HomeAssistant, setup_comp) -> None: +async def test_response(hass: HomeAssistant) -> None: """Test the conversation response action.""" response = "I'm sorry, Dave. I'm afraid I can't do that" assert await async_setup_component( @@ -106,7 +101,7 @@ async def test_response(hass: HomeAssistant, setup_comp) -> None: assert service_response["response"]["speech"]["plain"]["speech"] == response -async def test_empty_response(hass: HomeAssistant, setup_comp) -> None: +async def test_empty_response(hass: HomeAssistant) -> None: """Test the conversation response action with an empty response.""" assert await async_setup_component( hass, @@ -137,7 +132,7 @@ async def test_empty_response(hass: HomeAssistant, setup_comp) -> None: async def test_response_same_sentence( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the conversation response action with multiple triggers using the same sentence.""" assert await async_setup_component( @@ -186,8 +181,10 @@ async def test_response_same_sentence( assert service_response["response"]["speech"]["plain"]["speech"] == "response 1" # Service should still have been called - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "trigger1", "idx": "0", @@ -201,8 +198,6 @@ async def test_response_same_sentence( async def test_response_same_sentence_with_error( hass: HomeAssistant, - calls: list[ServiceCall], - setup_comp: None, caplog: pytest.LogCaptureFixture, ) -> None: """Test the conversation response action with multiple triggers using the same sentence and an error.""" @@ -253,7 +248,7 @@ async def test_response_same_sentence_with_error( async def test_subscribe_trigger_does_not_interfere_with_responses( - hass: HomeAssistant, setup_comp, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test that subscribing to a trigger from the websocket API does not interfere with responses.""" websocket_client = await hass_ws_client() @@ -310,7 +305,7 @@ async def test_subscribe_trigger_does_not_interfere_with_responses( async def test_same_trigger_multiple_sentences( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test matching of multiple sentences from the same trigger.""" assert await async_setup_component( @@ -341,8 +336,10 @@ 
async def test_same_trigger_multiple_sentences( # Only triggers once await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -355,7 +352,7 @@ async def test_same_trigger_multiple_sentences( async def test_same_sentence_multiple_triggers( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test use of the same sentence in multiple triggers.""" assert await async_setup_component( @@ -403,11 +400,12 @@ async def test_same_sentence_multiple_triggers( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 3 # The calls may come in any order call_datas: set[tuple[str, str, str]] = set() - for call in calls: + service_calls.pop(0) # First call is the call to conversation.process + for call in service_calls: call_data = call.data["data"] call_datas.add((call_data["id"], call_data["platform"], call_data["sentence"])) @@ -474,9 +472,7 @@ async def test_fails_on_no_sentences(hass: HomeAssistant) -> None: ) -async def test_wildcards( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp: None -) -> None: +async def test_wildcards(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: """Test wildcards in trigger sentences.""" assert await async_setup_component( hass, @@ -507,8 +503,10 @@ async def test_wildcards( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["data"] == { + assert len(service_calls) == 2 + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["data"] == { "alias": None, "id": "0", "idx": "0", @@ -536,8 +534,6 @@ async def test_wildcards( async def test_trigger_with_device_id(hass: HomeAssistant) -> None: """Test that a trigger receives a device_id.""" - assert await async_setup_component(hass, "homeassistant", {}) - assert await async_setup_component(hass, "conversation", {}) assert await async_setup_component( hass, "automation", diff --git a/tests/components/coolmaster/conftest.py b/tests/components/coolmaster/conftest.py index 15670af4bc8..27a801288b0 100644 --- a/tests/components/coolmaster/conftest.py +++ b/tests/components/coolmaster/conftest.py @@ -18,7 +18,7 @@ DEFAULT_INFO: dict[str, str] = { "version": "1", } -TEST_UNITS: dict[dict[str, Any]] = { +TEST_UNITS: dict[str, dict[str, Any]] = { "L1.100": { "is_on": False, "thermostat": 20, diff --git a/tests/components/cover/test_device_condition.py b/tests/components/cover/test_device_condition.py index 545bdd6587e..8c1d2d1c9a7 100644 --- a/tests/components/cover/test_device_condition.py +++ b/tests/components/cover/test_device_condition.py @@ -26,7 +26,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -36,12 +35,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ 
@@ -359,7 +352,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -473,36 +466,36 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_open - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_open - event - test_event1" hass.states.async_set(entry.entity_id, STATE_CLOSED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_closed - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_closed - event - test_event2" hass.states.async_set(entry.entity_id, STATE_OPENING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_opening - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_opening - event - test_event3" hass.states.async_set(entry.entity_id, STATE_CLOSING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_closing - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_closing - event - test_event4" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -550,15 +543,15 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_open - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_open - event - test_event1" async def test_if_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -676,10 +669,10 @@ async def test_if_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} @@ -690,9 +683,9 @@ async def test_if_position( await hass.async_block_till_done() 
hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(service_calls) == 5 + assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} @@ -701,14 +694,14 @@ async def test_if_position( hass.bus.async_fire("test_event2") hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 7 - assert calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") @@ -718,7 +711,7 @@ async def test_if_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, mock_cover_entities: list[MockCover], ) -> None: @@ -836,10 +829,10 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" - assert calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" - assert calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[0].data["some"] == "is_pos_gt_45 - event - test_event1" + assert service_calls[1].data["some"] == "is_pos_lt_90 - event - test_event2" + assert service_calls[2].data["some"] == "is_pos_gt_45_lt_90 - event - test_event3" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} @@ -850,9 +843,9 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" - assert calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" + assert len(service_calls) == 5 + assert service_calls[3].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert service_calls[4].data["some"] == "is_pos_lt_90 - event - test_event2" hass.states.async_set( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} @@ -863,14 +856,14 @@ async def test_if_tilt_position( await hass.async_block_till_done() hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_pos_gt_45 - event - test_event1" hass.states.async_set(ent.entity_id, STATE_UNAVAILABLE, attributes={}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 7 - assert 
calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_pos_not_gt_45 - event - test_event1" for record in caplog.records: assert record.levelname in ("DEBUG", "INFO") diff --git a/tests/components/cover/test_device_trigger.py b/tests/components/cover/test_device_trigger.py index 419eea05f9f..5eb8cd484b2 100644 --- a/tests/components/cover/test_device_trigger.py +++ b/tests/components/cover/test_device_trigger.py @@ -29,7 +29,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -39,12 +38,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_trigger_types"), [ @@ -381,7 +374,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -496,36 +489,36 @@ async def test_if_fires_on_state_change( # Fake that the entity is opened. hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) # Fake that the entity is closed. hass.states.async_set(entry.entity_id, STATE_CLOSED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"closed - device - {entry.entity_id} - open - closed - None" ) # Fake that the entity is opening. hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"opening - device - {entry.entity_id} - closed - opening - None" ) # Fake that the entity is closing. hass.states.async_set(entry.entity_id, STATE_CLOSING) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"closing - device - {entry.entity_id} - opening - closing - None" ) @@ -534,7 +527,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for state triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -583,9 +576,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is opened. 
hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"opened - device - {entry.entity_id} - closed - open - None" ) @@ -594,7 +587,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -640,17 +633,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - closed - open - 0:00:05" ) @@ -660,7 +653,7 @@ async def test_if_fires_on_position( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, mock_cover_entities: list[MockCover], - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for position triggers.""" setup_test_component_platform(hass, DOMAIN, mock_cover_entities) @@ -769,9 +762,13 @@ async def test_if_fires_on_position( ent.entity_id, STATE_OPEN, attributes={"current_position": 50} ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert sorted( - [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] + [ + service_calls[0].data["some"], + service_calls[1].data["some"], + service_calls[2].data["some"], + ] ) == sorted( [ ( @@ -791,9 +788,9 @@ async def test_if_fires_on_position( ent.entity_id, STATE_CLOSED, attributes={"current_position": 45} ) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) @@ -801,9 +798,9 @@ async def test_if_fires_on_position( ent.entity_id, STATE_CLOSED, attributes={"current_position": 90} ) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) @@ -812,7 +809,7 @@ async def test_if_fires_on_tilt_position( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_cover_entities: list[MockCover], ) -> None: """Test for tilt position triggers.""" @@ -924,9 +921,13 @@ async def test_if_fires_on_tilt_position( ent.entity_id, STATE_OPEN, attributes={"current_tilt_position": 50} ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert sorted( - [calls[0].data["some"], calls[1].data["some"], calls[2].data["some"]] + [ + service_calls[0].data["some"], + service_calls[1].data["some"], + service_calls[2].data["some"], + ] ) == sorted( [ ( @@ -946,9 +947,9 @@ async def test_if_fires_on_tilt_position( 
ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 45} ) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"is_pos_lt_90 - device - {entry.entity_id} - closed - closed - None" ) @@ -956,8 +957,8 @@ async def test_if_fires_on_tilt_position( ent.entity_id, STATE_CLOSED, attributes={"current_tilt_position": 90} ) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"is_pos_gt_45 - device - {entry.entity_id} - closed - closed - None" ) diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 7da6c6efe21..37740260c2f 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -156,7 +156,7 @@ def is_closing(hass, ent): return hass.states.is_state(ent.entity_id, STATE_CLOSING) -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in enum] diff --git a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 8ee621596db..1cf23c4c3df 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -1,5 +1,9 @@ """The tests for the cover platform.""" +from typing import Any + +import pytest + from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, DOMAIN, @@ -16,15 +20,24 @@ from homeassistant.setup import async_setup_component from tests.common import async_mock_service -async def test_open_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassOpenCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_CLOSED) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_CLOSED, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, SERVICE_OPEN_COVER) response = await intent.async_handle( - hass, "test", cover_intent.INTENT_OPEN_COVER, {"name": {"value": "garage door"}} + hass, "test", cover_intent.INTENT_OPEN_COVER, slots ) await hass.async_block_till_done() @@ -36,18 +49,27 @@ async def test_open_cover_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -async def test_close_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassCloseCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_OPEN) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_OPEN, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) response = await intent.async_handle( hass, "test", cover_intent.INTENT_CLOSE_COVER, - {"name": {"value": "garage door"}}, + slots, ) await hass.async_block_till_done() @@ -59,13 +81,22 @@ async def test_close_cover_intent(hass: HomeAssistant) -> None: assert call.data == 
{"entity_id": f"{DOMAIN}.garage_door"} -async def test_set_cover_position(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "test cover"}, "position": {"value": 50}}), + ({"device_class": {"value": "shade"}, "position": {"value": 50}}), + ], +) +async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassSetPosition intent for covers.""" assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( - entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} + entity_id, + STATE_CLOSED, + attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) @@ -73,7 +104,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: hass, "test", intent.INTENT_SET_POSITION, - {"name": {"value": "test cover"}, "position": {"value": 50}}, + slots, ) await hass.async_block_till_done() diff --git a/tests/components/cpuspeed/conftest.py b/tests/components/cpuspeed/conftest.py index e3ea1432659..d9079079ba2 100644 --- a/tests/components/cpuspeed/conftest.py +++ b/tests/components/cpuspeed/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.cpuspeed.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/crownstone/test_config_flow.py b/tests/components/crownstone/test_config_flow.py index be9086e02da..5dd00e7baff 100644 --- a/tests/components/crownstone/test_config_flow.py +++ b/tests/components/crownstone/test_config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from crownstone_cloud.cloud_models.spheres import Spheres @@ -11,7 +12,6 @@ from crownstone_cloud.exceptions import ( ) import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from homeassistant.components import usb from homeassistant.components.crownstone.const import ( diff --git a/tests/components/daikin/test_config_flow.py b/tests/components/daikin/test_config_flow.py index 6d957384d4d..5c432e111dd 100644 --- a/tests/components/daikin/test_config_flow.py +++ b/tests/components/daikin/test_config_flow.py @@ -28,9 +28,11 @@ def mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch("homeassistant.components.daikin.config_flow.Appliance") as Appliance: + with patch( + "homeassistant.components.daikin.config_flow.DaikinFactory" + ) as Appliance: type(Appliance).mac = PropertyMock(return_value="AABBCCDDEEFF") - Appliance.factory.side_effect = mock_daikin_factory + Appliance.side_effect = mock_daikin_factory yield Appliance @@ -90,7 +92,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, mock_daikin) -> None: ) async def test_device_abort(hass: HomeAssistant, mock_daikin, s_effect, reason) -> None: """Test device abort.""" - mock_daikin.factory.side_effect = s_effect + mock_daikin.side_effect = s_effect result = await hass.config_entries.flow.async_init( "daikin", diff --git a/tests/components/daikin/test_init.py b/tests/components/daikin/test_init.py index d7d754dacd2..b3d18467d33 100644 --- a/tests/components/daikin/test_init.py +++ b/tests/components/daikin/test_init.py @@ -27,8 +27,8 @@ def 
mock_daikin(): """Mock the init function in pydaikin.""" return Appliance - with patch("homeassistant.components.daikin.Appliance") as Appliance: - Appliance.factory.side_effect = mock_daikin_factory + with patch("homeassistant.components.daikin.DaikinFactory") as Appliance: + Appliance.side_effect = mock_daikin_factory type(Appliance).update_status = AsyncMock() type(Appliance).device_ip = PropertyMock(return_value=HOST) type(Appliance).inside_temperature = PropertyMock(return_value=22) @@ -208,7 +208,7 @@ async def test_client_connection_error(hass: HomeAssistant, mock_daikin) -> None ) config_entry.add_to_hass(hass) - mock_daikin.factory.side_effect = ClientConnectionError + mock_daikin.side_effect = ClientConnectionError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -224,7 +224,7 @@ async def test_timeout_error(hass: HomeAssistant, mock_daikin) -> None: ) config_entry.add_to_hass(hass) - mock_daikin.factory.side_effect = TimeoutError + mock_daikin.side_effect = TimeoutError await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/deconz/conftest.py b/tests/components/deconz/conftest.py index d0f0f11c99b..fd3003b96ef 100644 --- a/tests/components/deconz/conftest.py +++ b/tests/components/deconz/conftest.py @@ -2,30 +2,304 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine, Generator +from types import MappingProxyType +from typing import Any, Protocol from unittest.mock import patch from pydeconz.websocket import Signal import pytest +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 +from tests.test_util.aiohttp import AiohttpClientMocker + +type ConfigEntryFactoryType = Callable[ + [MockConfigEntry], Coroutine[Any, Any, MockConfigEntry] +] +type WebsocketDataType = Callable[[dict[str, Any]], Coroutine[Any, Any, None]] +type WebsocketStateType = Callable[[str], Coroutine[Any, Any, None]] -@pytest.fixture(autouse=True) -def mock_deconz_websocket(): +class _WebsocketMock(Protocol): + async def __call__( + self, data: dict[str, Any] | None = None, state: str = "" + ) -> None: ... 
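[Editor's note — not part of the patch] The rewritten deCONZ conftest below replaces the single autouse websocket mock with composable fixtures: config-entry data/options, per-resource payload fixtures, a `config_entry_factory`/`config_entry_setup` pair, and websocket helpers (`mock_websocket_data`, `light_ws_data`, `sensor_ws_data`, `mock_websocket_state`). A hedged usage sketch follows; the test name, payload values, and connection-state string are illustrative assumptions, while the fixture names are taken from the definitions further down:

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def test_example_fixture_usage(
    hass: HomeAssistant,
    config_entry_setup: MockConfigEntry,
    light_ws_data,
    mock_websocket_state,
) -> None:
    """Illustrative sketch: exercise the deCONZ websocket helper fixtures."""
    # light_ws_data merges {"r": "lights"} into the payload and fills the
    # t/e/id defaults, so only the changed fields need to be supplied.
    await light_ws_data({"state": {"on": True}})
    # mock_websocket_state drives Signal.CONNECTION_STATE with the given value.
    await mock_websocket_state("retrying")
    await hass.async_block_till_done()
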
+ + +# Config entry fixtures + +API_KEY = "1234567890ABCDEF" +BRIDGE_ID = "01234E56789A" +HOST = "1.2.3.4" +PORT = 80 + + +@pytest.fixture(name="config_entry") +def fixture_config_entry( + config_entry_data: MappingProxyType[str, Any], + config_entry_options: MappingProxyType[str, Any], + config_entry_source: str, +) -> MockConfigEntry: + """Define a config entry fixture.""" + return MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="1", + unique_id=BRIDGE_ID, + data=config_entry_data, + options=config_entry_options, + source=config_entry_source, + ) + + +@pytest.fixture(name="config_entry_data") +def fixture_config_entry_data() -> MappingProxyType[str, Any]: + """Define a config entry data fixture.""" + return { + CONF_API_KEY: API_KEY, + CONF_HOST: HOST, + CONF_PORT: PORT, + } + + +@pytest.fixture(name="config_entry_options") +def fixture_config_entry_options() -> MappingProxyType[str, Any]: + """Define a config entry options fixture.""" + return {} + + +@pytest.fixture(name="config_entry_source") +def fixture_config_entry_source() -> str: + """Define a config entry source fixture.""" + return SOURCE_USER + + +# Request mocks + + +@pytest.fixture(name="mock_put_request") +def fixture_put_request( + aioclient_mock: AiohttpClientMocker, config_entry_data: MappingProxyType[str, Any] +) -> Callable[[str, str], AiohttpClientMocker]: + """Mock a deCONZ put request.""" + _host = config_entry_data[CONF_HOST] + _port = config_entry_data[CONF_PORT] + _api_key = config_entry_data[CONF_API_KEY] + + def __mock_requests(path: str, host: str = "") -> AiohttpClientMocker: + url = f"http://{host or _host}:{_port}/api/{_api_key}{path}" + aioclient_mock.put(url, json={}, headers={"content-type": CONTENT_TYPE_JSON}) + return aioclient_mock + + return __mock_requests + + +@pytest.fixture(name="mock_requests") +def fixture_get_request( + aioclient_mock: AiohttpClientMocker, + config_entry_data: MappingProxyType[str, Any], + config_payload: dict[str, Any], + alarm_system_payload: dict[str, Any], + group_payload: dict[str, Any], + light_payload: dict[str, Any], + sensor_payload: dict[str, Any], + deconz_payload: dict[str, Any], +) -> Callable[[str], None]: + """Mock default deCONZ requests responses.""" + _host = config_entry_data[CONF_HOST] + _port = config_entry_data[CONF_PORT] + _api_key = config_entry_data[CONF_API_KEY] + + data = deconz_payload + data.setdefault("alarmsystems", alarm_system_payload) + data.setdefault("config", config_payload) + data.setdefault("groups", group_payload) + if "state" in light_payload: + light_payload = {"0": light_payload} + data.setdefault("lights", light_payload) + if "state" in sensor_payload or "config" in sensor_payload: + sensor_payload = {"0": sensor_payload} + data.setdefault("sensors", sensor_payload) + + def __mock_requests(host: str = "") -> None: + url = f"http://{host or _host}:{_port}/api/{_api_key}" + aioclient_mock.get( + url, + json=deconz_payload | {"config": config_payload}, + headers={ + "content-type": CONTENT_TYPE_JSON, + }, + ) + + return __mock_requests + + +# Request payload fixtures + + +@pytest.fixture(name="deconz_payload") +def fixture_data() -> dict[str, Any]: + """Combine multiple payloads with one fixture.""" + return {} + + +@pytest.fixture(name="alarm_system_payload") +def fixture_alarm_system_data() -> dict[str, Any]: + """Alarm system data.""" + return {} + + +@pytest.fixture(name="config_payload") +def fixture_config_data() -> dict[str, Any]: + """Config data.""" + return { + "bridgeid": BRIDGE_ID, + "ipaddress": HOST, + "mac": 
"00:11:22:33:44:55", + "modelid": "deCONZ", + "name": "deCONZ mock gateway", + "sw_version": "2.05.69", + "uuid": "1234", + "websocketport": 1234, + } + + +@pytest.fixture(name="group_payload") +def fixture_group_data() -> dict[str, Any]: + """Group data.""" + return {} + + +@pytest.fixture(name="light_payload") +def fixture_light_data() -> dict[str, Any]: + """Light data. + + Should be + - one light data payload {"state": ...} + - multiple lights {"1": ..., "2": ...} + """ + return {} + + +@pytest.fixture(name="sensor_payload") +def fixture_sensor_data() -> dict[str, Any]: + """Sensor data. + + Should be + - one sensor data payload {"config": ..., "state": ...} ("0") + - multiple sensors {"1": ..., "2": ...} + """ + return {} + + +@pytest.fixture(name="config_entry_factory") +async def fixture_config_entry_factory( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_requests: Callable[[str], None], +) -> ConfigEntryFactoryType: + """Fixture factory that can set up UniFi network integration.""" + + async def __mock_setup_config_entry( + entry: MockConfigEntry = config_entry, + ) -> MockConfigEntry: + entry.add_to_hass(hass) + mock_requests(entry.data[CONF_HOST]) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry + + return __mock_setup_config_entry + + +@pytest.fixture(name="config_entry_setup") +async def fixture_config_entry_setup( + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: + """Fixture providing a set up instance of deCONZ integration.""" + return await config_entry_factory() + + +# Websocket fixtures + + +@pytest.fixture(autouse=True, name="_mock_websocket") +def fixture_websocket() -> Generator[_WebsocketMock]: """No real websocket allowed.""" with patch("pydeconz.gateway.WSClient") as mock: - async def make_websocket_call(data: dict | None = None, state: str = ""): + async def make_websocket_call( + data: dict[str, Any] | None = None, state: str = "" + ) -> None: """Generate a websocket call.""" pydeconz_gateway_session_handler = mock.call_args[0][3] + signal: Signal if data: mock.return_value.data = data - await pydeconz_gateway_session_handler(signal=Signal.DATA) + signal = Signal.DATA elif state: mock.return_value.state = state - await pydeconz_gateway_session_handler(signal=Signal.CONNECTION_STATE) - else: - raise NotImplementedError + signal = Signal.CONNECTION_STATE + await pydeconz_gateway_session_handler(signal) yield make_websocket_call + + +@pytest.fixture(name="mock_websocket_data") +def fixture_websocket_data(_mock_websocket: _WebsocketMock) -> WebsocketDataType: + """Fixture to send websocket data.""" + + async def change_websocket_data(data: dict[str, Any]) -> None: + """Provide new data on the websocket.""" + if "t" not in data: + data["t"] = "event" + if "e" not in data: + data["e"] = "changed" + if "id" not in data: + data["id"] = "0" + await _mock_websocket(data=data) + + return change_websocket_data + + +@pytest.fixture(name="light_ws_data") +def fixture_light_websocket_data( + mock_websocket_data: WebsocketDataType, +) -> WebsocketDataType: + """Fixture to send light data over websocket.""" + + async def send_light_data(data: dict[str, Any]) -> None: + """Send light data on the websocket.""" + await mock_websocket_data({"r": "lights"} | data) + + return send_light_data + + +@pytest.fixture(name="sensor_ws_data") +def fixture_sensor_websocket_data( + mock_websocket_data: WebsocketDataType, +) -> WebsocketDataType: + """Fixture to send sensor data over websocket.""" + + 
async def send_sensor_data(data: dict[str, Any]) -> None: + """Send sensor data on the websocket.""" + await mock_websocket_data({"r": "sensors"} | data) + + return send_sensor_data + + +@pytest.fixture(name="mock_websocket_state") +def fixture_websocket_state(_mock_websocket: _WebsocketMock) -> WebsocketStateType: + """Fixture to set websocket state.""" + + async def change_websocket_state(state: str) -> None: + """Simulate a change to the websocket connection state.""" + await _mock_websocket(state=state) + + return change_websocket_state diff --git a/tests/components/deconz/snapshots/test_alarm_control_panel.ambr b/tests/components/deconz/snapshots/test_alarm_control_panel.ambr new file mode 100644 index 00000000000..86b97a62dfe --- /dev/null +++ b/tests/components/deconz/snapshots/test_alarm_control_panel.ambr @@ -0,0 +1,51 @@ +# serializer version: 1 +# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'alarm_control_panel', + 'entity_category': None, + 'entity_id': 'alarm_control_panel.keypad', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Keypad', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_alarm_control_panel[sensor_payload0-alarm_system_payload0][alarm_control_panel.keypad-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'changed_by': None, + 'code_arm_required': True, + 'code_format': , + 'friendly_name': 'Keypad', + 'supported_features': , + }), + 'context': , + 'entity_id': 'alarm_control_panel.keypad', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_binary_sensor.ambr b/tests/components/deconz/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..584575c23af --- /dev/null +++ b/tests/components/deconz/snapshots/test_binary_sensor.ambr @@ -0,0 +1,1014 @@ +# serializer version: 1 +# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alarm_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload0-expected0-config_entry_options0][binary_sensor.alarm_10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'safety', + 'friendly_name': 'Alarm 10', + 'on': True, + 'temperature': 26.0, + }), + 'context': , + 'entity_id': 
'binary_sensor.alarm_10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.cave_co', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cave CO', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'Cave CO', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.cave_co', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.cave_co_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cave CO Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Cave CO Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.cave_co_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.cave_co_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cave CO Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensors[sensor_payload1-expected1-config_entry_options0][binary_sensor.cave_co_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Cave CO Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.cave_co_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Presence sensor Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload10-expected10-config_entry_options0][binary_sensor.presence_sensor_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Presence sensor Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke Test Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload2-expected2-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke Test Mode', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-fire', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'sensor_kitchen_smoke Test Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload3-expected3-config_entry_options0][binary_sensor.sensor_kitchen_smoke_test_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'sensor_kitchen_smoke Test Mode', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_kitchen_smoke_test_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.kitchen_switch', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Kitchen Switch', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'kitchen-switch-flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload4-expected4-config_entry_options0][binary_sensor.kitchen_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kitchen Switch', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.kitchen_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'on', + }) +# --- +# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.back_door', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Back Door', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2b:96:b4-01-0006-open', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload5-expected5-config_entry_options0][binary_sensor.back_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'opening', + 'friendly_name': 'Back Door', + 'on': True, + 'temperature': 33.0, + }), + 'context': , + 'entity_id': 'binary_sensor.back_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.motion_sensor_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0406-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload6-expected6-config_entry_options0][binary_sensor.motion_sensor_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Motion sensor 4', + 'on': True, + }), + 'context': , + 'entity_id': 'binary_sensor.motion_sensor_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.water2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-water', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 
'friendly_name': 'water2', + 'on': True, + 'temperature': 25.0, + }), + 'context': , + 'entity_id': 'binary_sensor.water2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.water2_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2 Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'water2 Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.water2_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.water2_tampered', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'water2 Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:2f:07:db-01-0500-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload7-expected7-config_entry_options0][binary_sensor.water2_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'water2 Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.water2_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.vibration_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Vibration 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:a5:21:24-01-0101-vibration', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_binary_sensors[sensor_payload8-expected8-config_entry_options0][binary_sensor.vibration_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'vibration', + 'friendly_name': 'Vibration 1', + 'on': True, + 'orientation': list([ + 10, + 1059, + 0, + ]), + 'temperature': 32.0, + 'tiltangle': 83, + 'vibrationstrength': 114, + }), + 'context': , + 'entity_id': 'binary_sensor.vibration_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Presence sensor Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'has_entity_name': False, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor Tampered', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-tampered', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[sensor_payload9-expected9-config_entry_options0][binary_sensor.presence_sensor_tampered-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'tamper', + 'friendly_name': 'Presence sensor Tampered', + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor_tampered', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_button.ambr b/tests/components/deconz/snapshots/test_button.ambr new file mode 100644 index 00000000000..1ef5248ebc3 --- /dev/null +++ b/tests/components/deconz/snapshots/test_button.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.light_group_scene_store_current_scene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:inbox-arrow-down', + 'original_name': 'Scene Store Current Scene', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01234E56789A/groups/1/scenes/1-store', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[deconz_payload0-expected0][button.light_group_scene_store_current_scene-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light group Scene Store Current Scene', + 'icon': 'mdi:inbox-arrow-down', + }), + 'context': , + 'entity_id': 'button.light_group_scene_store_current_scene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.aqara_fp1_reset_presence', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Aqara FP1 Reset Presence', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_button[deconz_payload1-expected1][button.aqara_fp1_reset_presence-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Aqara FP1 Reset Presence', + }), + 'context': , + 'entity_id': 'button.aqara_fp1_reset_presence', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_climate.ambr b/tests/components/deconz/snapshots/test_climate.ambr new file mode 100644 index 00000000000..4e33e11534e --- /dev/null +++ b/tests/components/deconz/snapshots/test_climate.ambr @@ -0,0 +1,545 @@ +# serializer version: 1 +# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_cooling_support[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'off', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_fan_support[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: 
test_climate_device_with_preset[sensor_payload0][climate.zen_01-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'preset_modes': list([ + 'auto', + 'boost', + 'comfort', + 'complex', + 'eco', + 'holiday', + 'manual', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.zen_01', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Zen-01', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:11:6f:56-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_with_preset[sensor_payload0][climate.zen_01-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 23.2, + 'fan_mode': 'off', + 'fan_modes': list([ + 'smart', + 'auto', + 'high', + 'medium', + 'low', + 'on', + 'off', + ]), + 'friendly_name': 'Zen-01', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'preset_mode': 'auto', + 'preset_modes': list([ + 'auto', + 'boost', + 'comfort', + 'complex', + 'eco', + 'holiday', + 'manual', + ]), + 'supported_features': , + 'temperature': 22.2, + }), + 'context': , + 'entity_id': 'climate.zen_01', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_device_without_cooling_support[sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'Thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 10, + 'supported_features': , + 'temperature': 22.0, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'auto', + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.clip_thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CLIP thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.clip_thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'CLIP thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'supported_features': , + 'temperature': None, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.clip_thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_clip_climate_device[config_entry_options0-sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.6, + 'friendly_name': 'Thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + 'offset': 10, + 'supported_features': , + 'temperature': 22.0, + 'valve': 30, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'auto', + }) +# --- +# name: test_simple_climate_device[sensor_payload0][climate.thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35, + 'min_temp': 7, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.thermostat', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'thermostat', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '14:b4:57:ff:fe:d5:4e:77-01-0201', + 'unit_of_measurement': None, + }) +# --- +# name: test_simple_climate_device[sensor_payload0][climate.thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 
21.0, + 'friendly_name': 'thermostat', + 'hvac_action': , + 'hvac_modes': list([ + , + , + ]), + 'locked': True, + 'max_temp': 35, + 'min_temp': 7, + 'offset': 0, + 'supported_features': , + 'temperature': 21.0, + 'valve': 24, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_cover.ambr b/tests/components/deconz/snapshots/test_cover.ambr new file mode 100644 index 00000000000..5c50923453c --- /dev/null +++ b/tests/components/deconz/snapshots/test_cover.ambr @@ -0,0 +1,150 @@ +# serializer version: 1 +# name: test_cover[light_payload0][cover.window_covering_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.window_covering_device', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Window covering device', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[light_payload0][cover.window_covering_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 0, + 'device_class': 'shade', + 'friendly_name': 'Window covering device', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.window_covering_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_level_controllable_output_cover[light_payload0][cover.vent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.vent', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Vent', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:22:a3:00:00:00:00:00-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_level_controllable_output_cover[light_payload0][cover.vent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 5, + 'current_tilt_position': 97, + 'device_class': 'damper', + 'friendly_name': 'Vent', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.vent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_tilt_cover[light_payload0][cover.covering_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.covering_device', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
None, + 'original_name': 'Covering device', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:24:46:00:00:12:34:56-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_tilt_cover[light_payload0][cover.covering_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'current_tilt_position': 100, + 'device_class': 'shade', + 'friendly_name': 'Covering device', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.covering_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_fan.ambr b/tests/components/deconz/snapshots/test_fan.ambr new file mode 100644 index 00000000000..8b7dbba64e4 --- /dev/null +++ b/tests/components/deconz/snapshots/test_fan.ambr @@ -0,0 +1,54 @@ +# serializer version: 1 +# name: test_fans[light_payload0][fan.ceiling_fan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': None, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.ceiling_fan', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ceiling fan', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:22:a3:00:00:27:8b:81-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_fans[light_payload0][fan.ceiling_fan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Ceiling fan', + 'percentage': 100, + 'percentage_step': 1.0, + 'preset_mode': None, + 'preset_modes': None, + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.ceiling_fan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_hub.ambr b/tests/components/deconz/snapshots/test_hub.ambr new file mode 100644 index 00000000000..f3aa9a5e65d --- /dev/null +++ b/tests/components/deconz/snapshots/test_hub.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_registry_entry + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://1.2.3.4:80', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'deconz', + '01234E56789A', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Dresden Elektronik', + 'model': 'deCONZ', + 'model_id': None, + 'name': 'deCONZ mock gateway', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr new file mode 100644 index 00000000000..46b6611dcbe --- /dev/null +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -0,0 +1,2971 @@ +# serializer version: 1 +# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 255, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': 
None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': 
False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 255, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input0-light_payload0][light.tunable_white_light-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input0-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 
'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 67, + 
0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input1-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input1-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.dimmable_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.dimmable_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Dimmable light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:02-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.dimmable_light-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Dimmable light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.dimmable_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.group-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.group', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01234E56789A-/groups/0', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.group-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'all_on': False, + 'brightness': 50, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Group', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': True, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.group', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.rgb_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.rgb_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'RGB light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.rgb_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 50, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'RGB light', + 'hs_color': tuple( + 52.0, + 100.0, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 255, + 221, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.5, + 0.5, + ), + }), + 'context': , + 'entity_id': 'light.rgb_light', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_groups[input2-light_payload0][light.tunable_white_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.tunable_white_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tunable white light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_groups[input2-light_payload0][light.tunable_white_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': , + 'color_temp': 2500, + 'color_temp_kelvin': 400, + 'friendly_name': 'Tunable white light', + 'hs_color': tuple( + 15.981, + 100.0, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6451, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 155, + 'rgb_color': tuple( + 255, + 67, + 0, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.674, + 0.322, + ), + }), + 'context': , + 'entity_id': 'light.tunable_white_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload0-expected0][light.hue_go-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_go', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Go', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload0-expected0][light.hue_go-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 375, + 'color_temp_kelvin': 2666, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Go', + 'hs_color': tuple( + 28.47, + 66.821, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 165, + 84, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.53, + 0.388, + ), + }), + 'context': , + 'entity_id': 'light.hue_go', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_lights[light_payload0][light.hue_go-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_go', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Go', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-00', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload0][light.hue_go-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 375, + 'color_temp_kelvin': 2666, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Go', + 'hs_color': tuple( + 28.47, + 66.821, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 165, + 84, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.53, + 0.388, + ), + }), + 'context': , + 'entity_id': 'light.hue_go', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload1-expected1][light.hue_ensis-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_ensis', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Ensis', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload1-expected1][light.hue_ensis-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Ensis', + 'hs_color': tuple( + 29.691, + 38.039, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'rgb_color': tuple( + 255, + 206, + 158, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.427, + 0.373, + ), + }), + 'context': , + 'entity_id': 'light.hue_ensis', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_lights[light_payload1][light.hue_ensis-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_ensis', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Ensis', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload1][light.hue_ensis-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Hue Ensis', + 'hs_color': tuple( + 29.691, + 38.039, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 7142, + 'max_mireds': 650, + 'min_color_temp_kelvin': 1538, + 'min_mireds': 140, + 'rgb_color': tuple( + 255, + 206, + 158, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.427, + 0.373, + ), + }), + 'context': , + 'entity_id': 'light.hue_ensis', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload2-expected2][light.lidl_xmas_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.lidl_xmas_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LIDL xmas light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload2-expected2][light.lidl_xmas_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 25, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'friendly_name': 'LIDL xmas light', + 'hs_color': tuple( + 294.938, + 55.294, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 243, + 113, + 255, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.357, + 0.188, + ), + }), + 'context': , + 'entity_id': 'light.lidl_xmas_light', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload2][light.lidl_xmas_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.lidl_xmas_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'LIDL xmas light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload2][light.lidl_xmas_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 25, + 'color_mode': , + 'effect': None, + 'effect_list': list([ + 'carnival', + 'collide', + 'fading', + 'fireworks', + 'flag', + 'glow', + 'rainbow', + 'snake', + 'snow', + 'sparkles', + 'steady', + 'strobe', + 'twinkle', + 'updown', + 'vintage', + 'waves', + ]), + 'friendly_name': 'LIDL xmas light', + 'hs_color': tuple( + 294.938, + 55.294, + ), + 'is_deconz_group': False, + 'rgb_color': tuple( + 243, + 113, + 255, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.357, + 0.188, + ), + }), + 'context': , + 'entity_id': 'light.lidl_xmas_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_white_ambiance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue White Ambiance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-02', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 396, + 'color_temp_kelvin': 2525, + 'friendly_name': 'Hue White Ambiance', + 'hs_color': tuple( + 28.809, + 71.624, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 160, + 72, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.544, + 0.389, + ), + }), + 'context': , + 'entity_id': 
'light.hue_white_ambiance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload3][light.hue_white_ambiance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_white_ambiance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue White Ambiance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-02', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload3][light.hue_white_ambiance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'color_temp': 396, + 'color_temp_kelvin': 2525, + 'friendly_name': 'Hue White Ambiance', + 'hs_color': tuple( + 28.809, + 71.624, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 454, + 'min_color_temp_kelvin': 2202, + 'min_mireds': 153, + 'rgb_color': tuple( + 255, + 160, + 72, + ), + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.544, + 0.389, + ), + }), + 'context': , + 'entity_id': 'light.hue_white_ambiance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload4-expected4][light.hue_filament-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_filament', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Filament', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-03', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload4-expected4][light.hue_filament-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'friendly_name': 'Hue Filament', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.hue_filament', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload4][light.hue_filament-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.hue_filament', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Hue Filament', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:01:23:45:67-03', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload4][light.hue_filament-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 254, + 'color_mode': , + 'friendly_name': 'Hue Filament', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.hue_filament', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload5-expected5][light.simple_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.simple_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Simple Light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload5-expected5][light.simple_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Simple Light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.simple_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload5][light.simple_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.simple_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Simple Light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:01:23:45:67-01', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload5][light.simple_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Simple Light', + 'is_deconz_group': False, + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.simple_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload6-expected6][light.gradient_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 
'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.gradient_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gradient light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload6-expected6][light.gradient_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 184, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Gradient light', + 'hs_color': tuple( + 98.095, + 74.118, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 135, + 255, + 66, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.2727, + 0.6226, + ), + }), + 'context': , + 'entity_id': 'light.gradient_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_lights[light_payload6][light.gradient_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'effect_list': list([ + 'colorloop', + ]), + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'supported_color_modes': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.gradient_light', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gradient light', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', + 'unit_of_measurement': None, + }) +# --- +# name: test_lights[light_payload6][light.gradient_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 184, + 'color_mode': , + 'color_temp': None, + 'color_temp_kelvin': None, + 'effect': None, + 'effect_list': list([ + 'colorloop', + ]), + 'friendly_name': 'Gradient light', + 'hs_color': tuple( + 98.095, + 74.118, + ), + 'is_deconz_group': False, + 'max_color_temp_kelvin': 6535, + 'max_mireds': 500, + 'min_color_temp_kelvin': 2000, + 'min_mireds': 153, + 'rgb_color': tuple( + 135, + 255, + 66, + ), + 'supported_color_modes': list([ + , + , + , + ]), + 'supported_features': , + 'xy_color': tuple( + 0.2727, + 0.6226, + ), + }), + 'context': , + 'entity_id': 'light.gradient_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_number.ambr b/tests/components/deconz/snapshots/test_number.ambr new file mode 100644 index 00000000000..5311addc7a1 --- /dev/null +++ b/tests/components/deconz/snapshots/test_number.ambr @@ -0,0 +1,211 @@ +# 
serializer version: 1 +# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.presence_sensor_delay', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Presence sensor Delay', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-delay', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Presence sensor Delay', + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.presence_sensor_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.presence_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Presence sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-presence', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'motion', + 
'friendly_name': 'Presence sensor', + 'on': True, + 'temperature': 0.1, + }), + 'context': , + 'entity_id': 'binary_sensor.presence_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.presence_sensor_duration', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Presence sensor Duration', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-duration', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Presence sensor Duration', + 'max': 65535, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.presence_sensor_duration', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_scene.ambr b/tests/components/deconz/snapshots/test_scene.ambr new file mode 100644 index 00000000000..85a5ab92c5c --- /dev/null +++ b/tests/components/deconz/snapshots/test_scene.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_scenes[group_payload0-expected0][scene.light_group_scene-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.light_group_scene', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scene', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01234E56789A/groups/1/scenes/1', + 'unit_of_measurement': None, + }) +# --- +# name: test_scenes[group_payload0-expected0][scene.light_group_scene-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Light group Scene', + }), + 'context': , + 'entity_id': 'scene.light_group_scene', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_select.ambr b/tests/components/deconz/snapshots/test_select.ambr new file mode 100644 index 00000000000..12966709947 --- /dev/null +++ b/tests/components/deconz/snapshots/test_select.ambr @@ -0,0 +1,508 @@ +# serializer version: 1 +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 
'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload0-expected0][select.aqara_fp1_trigger_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: 
test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload1-expected1][select.aqara_fp1_trigger_distance-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Device Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_device_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Device Mode', + 'options': list([ + 'leftright', + 'undirected', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_device_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'undirected', + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Sensitivity', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_sensitivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Sensitivity', + 'options': list([ + 'High', + 'Medium', + 'Low', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_sensitivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'High', + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aqara FP1 Trigger 
Distance', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload2-expected2][select.aqara_fp1_trigger_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Aqara FP1 Trigger Distance', + 'options': list([ + 'far', + 'medium', + 'near', + ]), + }), + 'context': , + 'entity_id': 'select.aqara_fp1_trigger_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'medium', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..7f12292abbd --- /dev/null +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -0,0 +1,2297 @@ +# serializer version: 1 +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.clip_flur', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CLIP Flur', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '/sensors/3-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_flur-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'CLIP Flur', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.clip_flur', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.clip_light_level_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CLIP light level sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-00-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.clip_light_level_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'illuminance', + 'friendly_name': 'CLIP light level sensor', + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.clip_light_level_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '999.8', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_level_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light level sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': False, + 'device_class': 'illuminance', + 'friendly_name': 'Light level sensor', + 'on': True, + 'state_class': , + 'temperature': 0.1, + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.light_level_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '999.8', + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.light_level_sensor_temperature', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light level sensor Temperature', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:00-00-internal_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_allow_clip_sensors[config_entry_options0-sensor_payload0][sensor.light_level_sensor_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Light level sensor Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.light_level_sensor_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.1', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'BOSCH Air quality sensor', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'poor', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload0-expected0][sensor.bosch_air_quality_sensor_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '809', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'poor', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'BOSCH Air quality sensor PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 
'unique_id': '00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload1-expected1][sensor.bosch_air_quality_sensor_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'BOSCH Air quality sensor PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.bosch_air_quality_sensor_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '809', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.fsm_state_motion_stair', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FSM_STATE Motion stair', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'fsm-state-1520195376277-status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload10-expected10][sensor.fsm_state_motion_stair-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'FSM_STATE Motion stair', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.fsm_state_motion_stair', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'humidity', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.55', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0405-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload11-expected11][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.soil_sensor_low_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soil Sensor Low Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-low_battery', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Soil Sensor Low Battery', + }), + 'context': , + 'entity_id': 'binary_sensor.soil_sensor_low_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.soil_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soil Sensor', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Soil Sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soil_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72.13', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.soil_sensor_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soil Sensor Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Soil Sensor Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.soil_sensor_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.motion_sensor_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-light_level', + 'unit_of_measurement': 'lx', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': True, + 'daylight': False, + 'device_class': 'illuminance', + 'friendly_name': 'Motion sensor 4', + 'on': True, + 'state_class': , + 'unit_of_measurement': 'lx', + }), + 'context': , + 'entity_id': 'sensor.motion_sensor_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.motion_sensor_4_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Motion sensor 4 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:03:28:8c:9b-02-0400-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload13-expected13][sensor.motion_sensor_4_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'dark': True, + 'daylight': False, + 'device_class': 'battery', + 'friendly_name': 'Motion sensor 4 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.motion_sensor_4_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.starkvind_airpurifier_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'STARKVIND AirPurifier PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload14-expected14][sensor.starkvind_airpurifier_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'STARKVIND AirPurifier PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.starkvind_airpurifier_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.power_16', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power 16', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0b04-power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload15-expected15][sensor.power_16-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current': 34, + 'device_class': 'power', + 'friendly_name': 'Power 16', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + 'voltage': 231, + }), + 'context': , + 'entity_id': 'sensor.power_16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '64', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1010', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0403-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload16-expected16][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mi_temperature_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mi temperature 1', + 'on': True, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '21.82', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mi temperature 1 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:45:dc:53-01-0402-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload17-expected17][sensor.mi_temperature_1_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Mi temperature 1 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.mi_temperature_1_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.etrv_sejour', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'eTRV Séjour', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'eTRV Séjour', + }), + 'context': , + 'entity_id': 'sensor.etrv_sejour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-11-19T08:07:08+00:00', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.etrv_sejour_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'eTRV Séjour Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cc:cc:cc:ff:fe:38:4d:b3-01-000a-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload18-expected18][sensor.etrv_sejour_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'battery', + 'friendly_name': 'eTRV Séjour Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.etrv_sejour_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alarm_10', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'safety', + 'friendly_name': 'Alarm 10', + 'on': True, + 'temperature': 26.0, + }), + 'context': , + 'entity_id': 'binary_sensor.alarm_10', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.alarm_10_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alarm 10 Battery', + 'on': True, + 'state_class': , + 'temperature': 26.0, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alarm_10_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.alarm_10_temperature', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Alarm 10 Temperature', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': None, + 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alarm 10 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alarm_10_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26.0', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload2-expected2][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dimmer_switch_3_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dimmer switch 3 Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:17:88:01:02:0e:32:a3-02-fc00-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload20-expected20][sensor.dimmer_switch_3_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'event_id': 'dimmer_switch_3', + 'friendly_name': 'Dimmer switch 3 Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dimmer_switch_3_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '90', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: 
test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ch2o', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CH2O', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ch2o-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'AirQuality 1 CH2O', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ch2o', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_co2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 CO2', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_co2', + 'unit_of_measurement': 'ppm', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_co2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'AirQuality 1 CO2', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_co2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '359', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_pm25', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AirQuality 1 PM25', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5', + 'unit_of_measurement': 'µg/m³', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_pm25-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pm25', + 'friendly_name': 'AirQuality 1 PM25', + 'state_class': , + 'unit_of_measurement': 'µg/m³', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_pm25', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.airquality_1_ppb', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'AirQuality 1 PPB', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00:00:01-02-0113-air_quality_ppb', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload4-expected4][sensor.airquality_1_ppb-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'AirQuality 1 PPB', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.airquality_1_ppb', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': 
, + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'FYRTUR block-out roller blind Battery', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:ff:fe:01:23:45-01-0001-battery', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload5-expected5][sensor.fyrtur_block_out_roller_blind_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'FYRTUR block-out roller blind Battery', + 'on': True, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.fyrtur_block_out_roller_blind_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.carbondioxide_35', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CarbonDioxide 35', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload6-expected6][sensor.carbondioxide_35-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'CarbonDioxide 35', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.carbondioxide_35', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '370', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.consumption_15', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption 15', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:0d:6f:00:0b:7a:64:29-01-0702-consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload7-expected7][sensor.consumption_15-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Consumption 15', + 'on': True, + 'power': 123, + 'state_class': , + 'unit_of_measurement': 
, + }), + 'context': , + 'entity_id': 'sensor.consumption_15', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11.342', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.daylight', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:white-balance-sunny', + 'original_name': 'Daylight', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01:23:4E:FF:FF:56:78:9A-01-daylight_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload8-expected8][sensor.daylight-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'daylight': True, + 'friendly_name': 'Daylight', + 'icon': 'mdi:white-balance-sunny', + 'on': True, + }), + 'context': , + 'entity_id': 'sensor.daylight', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'solar_noon', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.formaldehyde_34', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Formaldehyde 34', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde', + 'unit_of_measurement': 'ppb', + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload9-expected9][sensor.formaldehyde_34-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'volatile_organic_compounds', + 'friendly_name': 'Formaldehyde 34', + 'state_class': , + 'unit_of_measurement': 'ppb', + }), + 'context': , + 'entity_id': 'sensor.formaldehyde_34', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/deconz/test_alarm_control_panel.py b/tests/components/deconz/test_alarm_control_panel.py index c855076de2f..6c47146f9b0 100644 --- a/tests/components/deconz/test_alarm_control_panel.py +++ b/tests/components/deconz/test_alarm_control_panel.py @@ -1,8 +1,11 @@ """deCONZ alarm control panel platform tests.""" +from collections.abc import Callable from unittest.mock import patch from pydeconz.models.sensor.ancillary_control import AncillaryControlPanel +import pytest +from syrupy import SnapshotAssertion from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, @@ -21,34 +24,21 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_PENDING, STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, - STATE_UNKNOWN, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers 
import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no climate entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_alarm_control_panel( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test successful creation of alarm control panel entities.""" - data = { - "alarmsystems": { +@pytest.mark.parametrize( + "alarm_system_payload", + [ + { "0": { "name": "default", "config": { @@ -75,230 +65,95 @@ async def test_alarm_control_panel( }, }, } - }, - "sensors": { - "0": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "none", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 4 - assert hass.states.get("alarm_control_panel.keypad").state == STATE_UNKNOWN - - # Event signals alarm control panel armed away - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_AWAY - - # Event signals alarm control panel armed night - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_NIGHT}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert ( - hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_NIGHT - ) - - # Event signals alarm control panel armed home - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.ARMED_STAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMED_HOME - - # Event signals alarm control panel disarmed - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.DISARMED}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_DISARMED - - # Event signals alarm control panel arming - - for arming_event in ( - AncillaryControlPanel.ARMING_AWAY, - AncillaryControlPanel.ARMING_NIGHT, - AncillaryControlPanel.ARMING_STAY, - 
): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": arming_event}, } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_ARMING - - # Event signals alarm control panel pending - - for pending_event in ( - AncillaryControlPanel.ENTRY_DELAY, - AncillaryControlPanel.EXIT_DELAY, - ): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": pending_event}, + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "none", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:00-00", } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + ], +) +async def test_alarm_control_panel( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, +) -> None: + """Test successful creation of alarm control panel entities.""" + with patch( + "homeassistant.components.deconz.PLATFORMS", [Platform.ALARM_CONTROL_PANEL] + ): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - assert ( - hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_PENDING - ) - - # Event signals alarm control panel triggered - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.IN_ALARM}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED - - # Event signals alarm control panel unknown state keeps previous state - - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"panel": AncillaryControlPanel.NOT_READY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - - assert hass.states.get("alarm_control_panel.keypad").state == STATE_ALARM_TRIGGERED + for action, state in ( + # Event signals alarm control panel armed state + (AncillaryControlPanel.ARMED_AWAY, STATE_ALARM_ARMED_AWAY), + (AncillaryControlPanel.ARMED_NIGHT, STATE_ALARM_ARMED_NIGHT), + (AncillaryControlPanel.ARMED_STAY, STATE_ALARM_ARMED_HOME), + (AncillaryControlPanel.DISARMED, STATE_ALARM_DISARMED), + # Event signals alarm control panel arming state + (AncillaryControlPanel.ARMING_AWAY, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_NIGHT, STATE_ALARM_ARMING), + (AncillaryControlPanel.ARMING_STAY, STATE_ALARM_ARMING), + # Event signals alarm control panel pending state + (AncillaryControlPanel.ENTRY_DELAY, STATE_ALARM_PENDING), + (AncillaryControlPanel.EXIT_DELAY, STATE_ALARM_PENDING), + # 
Event signals alarm control panel triggered state + (AncillaryControlPanel.IN_ALARM, STATE_ALARM_TRIGGERED), + # Event signals alarm control panel unknown state keeps previous state + (AncillaryControlPanel.NOT_READY, STATE_ALARM_TRIGGERED), + ): + await sensor_ws_data({"state": {"panel": action}}) + assert hass.states.get("alarm_control_panel.keypad").state == state # Verify service calls - # Service set alarm to away mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_away" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_AWAY, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "1234"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[1][2] == {"code0": "1234"} - - # Service set alarm to home mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_stay" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_HOME, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "2345"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[2][2] == {"code0": "2345"} - - # Service set alarm to night mode - - mock_deconz_put_request( - aioclient_mock, config_entry.data, "/alarmsystems/0/arm_night" - ) - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_ARM_NIGHT, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "3456"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[3][2] == {"code0": "3456"} - - # Service set alarm to disarmed - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/alarmsystems/0/disarm") - - await hass.services.async_call( - ALARM_CONTROL_PANEL_DOMAIN, - SERVICE_ALARM_DISARM, - {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: "4567"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"code0": "4567"} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + for path, service, code in ( + # Service set alarm to away mode + ("arm_away", SERVICE_ALARM_ARM_AWAY, "1234"), + # Service set alarm to home mode + ("arm_stay", SERVICE_ALARM_ARM_HOME, "2345"), + # Service set alarm to night mode + ("arm_night", SERVICE_ALARM_ARM_NIGHT, "3456"), + # Service set alarm to disarmed + ("disarm", SERVICE_ALARM_DISARM, "4567"), + ): + aioclient_mock.mock_calls.clear() + aioclient_mock = mock_put_request(f"/alarmsystems/0/{path}") + await hass.services.async_call( + ALARM_CONTROL_PANEL_DOMAIN, + service, + {ATTR_ENTITY_ID: "alarm_control_panel.keypad", ATTR_CODE: code}, + blocking=True, + ) + assert aioclient_mock.mock_calls[0][2] == {"code0": code} diff --git a/tests/components/deconz/test_binary_sensor.py b/tests/components/deconz/test_binary_sensor.py index 6ab5f2f5477..59d31afb9fc 100644 --- a/tests/components/deconz/test_binary_sensor.py +++ b/tests/components/deconz/test_binary_sensor.py @@ -1,10 +1,12 @@ """deCONZ binary sensor platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.deconz.const import ( CONF_ALLOW_CLIP_SENSOR, CONF_ALLOW_NEW_DEVICES, 
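The hunks above and below migrate these tests from hand-written per-attribute assertions to syrupy snapshot checks driven by parametrized payload fixtures (sensor_payload, config_entry_factory, snapshot_platform). A minimal, generic sketch of that pattern follows, assuming only pytest and syrupy; derive_state and the example payloads are hypothetical stand-ins for the Home Assistant fixtures used in the real tests:

# Minimal sketch of the snapshot-driven test style adopted in this patch.
# Assumptions: syrupy's pytest plugin is installed (it provides the `snapshot`
# fixture); `derive_state` is a hypothetical stand-in for the entity state an
# integration would build from a deCONZ-style sensor payload.
import pytest
from syrupy import SnapshotAssertion


def derive_state(payload: dict) -> dict:
    """Build a small state dict from a sensor payload (illustrative only)."""
    return {
        "entity_id": f"binary_sensor.{payload['name'].lower().replace(' ', '_')}",
        "state": "on" if payload["state"].get("alarm") else "off",
    }


@pytest.mark.parametrize(
    "sensor_payload",
    [
        {"name": "Alarm 10", "state": {"alarm": False}},
        {"name": "Cave CO", "state": {"alarm": True}},
    ],
)
def test_sensor_snapshot(sensor_payload: dict, snapshot: SnapshotAssertion) -> None:
    """Compare the derived state against the stored snapshot instead of
    asserting every attribute by hand."""
    assert derive_state(sensor_payload) == snapshot

Running pytest with --snapshot-update records or refreshes the stored .ambr snapshots (like the sensor entry and state snapshots earlier in this patch); later runs then compare against them, which is what snapshot_platform does per entity in the rewritten tests.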
@@ -12,32 +14,13 @@ from homeassistant.components.deconz.const import ( DOMAIN as DECONZ_DOMAIN, ) from homeassistant.components.deconz.services import SERVICE_DEVICE_REFRESH -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - STATE_OFF, - STATE_ON, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_request, - setup_deconz_integration, -) - -from tests.test_util.aiohttp import AiohttpClientMocker - - -async def test_no_binary_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import MockConfigEntry, snapshot_platform TEST_DATA = [ ( # Alarm binary sensor @@ -64,19 +47,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.alarm_10", - "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-alarm", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.SAFETY, - "attributes": { - "on": True, - "temperature": 26.0, - "device_class": "safety", - "friendly_name": "Alarm 10", - }, "websocket_event": {"alarm": True}, "next_state": STATE_ON, }, @@ -105,18 +76,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.cave_co", - "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-carbon_monoxide", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.CO, - "attributes": { - "on": True, - "device_class": "carbon_monoxide", - "friendly_name": "Cave CO", - }, "websocket_event": {"carbonmonoxide": True}, "next_state": STATE_ON, }, @@ -140,18 +100,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke", - "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-fire", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.SMOKE, - "attributes": { - "on": True, - "device_class": "smoke", - "friendly_name": "sensor_kitchen_smoke", - }, "websocket_event": {"fire": True}, "next_state": STATE_ON, }, @@ -176,17 +125,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:01:d9:3e:7c-01-0500", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "binary_sensor.sensor_kitchen_smoke_test_mode", - "unique_id": "00:15:8d:00:01:d9:3e:7c-01-0500-in_test_mode", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": BinarySensorDeviceClass.SMOKE, - "attributes": { - "device_class": "smoke", - "friendly_name": "sensor_kitchen_smoke Test Mode", - }, "websocket_event": {"test": True}, "next_state": STATE_ON, }, @@ -208,17 +147,7 @@ TEST_DATA = [ "uniqueid": "kitchen-switch", }, { - "entity_count": 1, - "device_count": 2, "entity_id": "binary_sensor.kitchen_switch", - "unique_id": "kitchen-switch-flag", - "state": STATE_ON, - "entity_category": None, - "device_class": None, - "attributes": { - "on": True, - "friendly_name": "Kitchen Switch", - }, "websocket_event": 
{"flag": False}, "next_state": STATE_OFF, }, @@ -245,19 +174,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2b:96:b4-01-0006", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.back_door", - "unique_id": "00:15:8d:00:02:2b:96:b4-01-0006-open", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.OPENING, - "attributes": { - "on": True, - "temperature": 33.0, - "device_class": "opening", - "friendly_name": "Back Door", - }, "websocket_event": {"open": True}, "next_state": STATE_ON, }, @@ -291,19 +208,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0406", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.motion_sensor_4", - "unique_id": "00:17:88:01:03:28:8c:9b-02-0406-presence", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.MOTION, - "attributes": { - "on": True, - "dark": False, - "device_class": "motion", - "friendly_name": "Motion sensor 4", - }, "websocket_event": {"presence": True}, "next_state": STATE_ON, }, @@ -332,19 +237,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:2f:07:db-01-0500", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "binary_sensor.water2", - "unique_id": "00:15:8d:00:02:2f:07:db-01-0500-water", - "state": STATE_OFF, - "entity_category": None, - "device_class": BinarySensorDeviceClass.MOISTURE, - "attributes": { - "on": True, - "temperature": 25.0, - "device_class": "moisture", - "friendly_name": "water2", - }, "websocket_event": {"water": True}, "next_state": STATE_ON, }, @@ -377,22 +270,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:a5:21:24-01-0101", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "binary_sensor.vibration_1", - "unique_id": "00:15:8d:00:02:a5:21:24-01-0101-vibration", - "state": STATE_ON, - "entity_category": None, - "device_class": BinarySensorDeviceClass.VIBRATION, - "attributes": { - "on": True, - "temperature": 32.0, - "orientation": [10, 1059, 0], - "tiltangle": 83, - "vibrationstrength": 114, - "device_class": "vibration", - "friendly_name": "Vibration 1", - }, "websocket_event": {"vibration": False}, "next_state": STATE_OFF, }, @@ -415,17 +293,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.presence_sensor_tampered", - "unique_id": "00:00:00:00:00:00:00:00-00-tampered", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": BinarySensorDeviceClass.TAMPER, - "attributes": { - "device_class": "tamper", - "friendly_name": "Presence sensor Tampered", - }, "websocket_event": {"tampered": True}, "next_state": STATE_ON, }, @@ -448,17 +316,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "binary_sensor.presence_sensor_low_battery", - "unique_id": "00:00:00:00:00:00:00:00-00-low_battery", - "state": STATE_OFF, - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": BinarySensorDeviceClass.BATTERY, - "attributes": { - "device_class": "battery", - "friendly_name": "Presence sensor Low Battery", - }, "websocket_event": {"lowbattery": True}, "next_state": STATE_ON, }, @@ -466,99 +324,50 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async def test_binary_sensors( hass: HomeAssistant, - 
device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of binary sensor entities.""" - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - sensor = hass.states.get(expected["entity_id"]) - assert sensor.state == expected["state"] - assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] - assert sensor.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BINARY_SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Change state - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": expected["websocket_event"], - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": expected["websocket_event"]}) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_not_allow_clip_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP presence sensor", - "type": "CLIPPresence", - "state": {"presence": False}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP presence sensor", + "type": "CLIPPresence", + "state": {"presence": False}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: + """Test that CLIP sensors are not allowed.""" assert len(hass.states.async_all()) == 0 -async def test_allow_clip_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors can be allowed.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Presence sensor", "type": "ZHAPresence", @@ -585,12 +394,13 @@ async def 
test_allow_clip_sensor( "uniqueid": "/sensors/3", }, } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_allow_clip_sensor( + hass: HomeAssistant, config_entry_setup: MockConfigEntry +) -> None: + """Test that CLIP sensors can be allowed.""" assert len(hass.states.async_all()) == 3 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF @@ -600,7 +410,7 @@ async def test_allow_clip_sensor( # Disallow clip sensors hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_CLIP_SENSOR: False} + config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: False} ) await hass.async_block_till_done() @@ -611,7 +421,7 @@ async def test_allow_clip_sensor( # Allow clip sensors hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_CLIP_SENSOR: True} + config_entry_setup, options={CONF_ALLOW_CLIP_SENSOR: True} ) await hass.async_block_till_done() @@ -620,15 +430,16 @@ async def test_allow_clip_sensor( assert hass.states.get("binary_sensor.clip_flag_boot_time").state == STATE_ON +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_binary_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor works.""" + assert len(hass.states.async_all()) == 0 + event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Presence sensor id", "name": "Presence sensor", @@ -638,22 +449,21 @@ async def test_add_new_binary_sensor( "uniqueid": "00:00:00:00:00:00:00:00-00", }, } - - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 1 assert hass.states.get("binary_sensor.presence_sensor").state == STATE_OFF +@pytest.mark.parametrize( + "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] +) async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( hass: HomeAssistant, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + deconz_payload: dict[str, Any], + mock_requests: Callable[[str], None], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -663,36 +473,24 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } - event_added_sensor = { - "t": "event", - "e": "added", - "r": "sensors", - "id": "1", - "sensor": sensor, - } - - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, - ) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"e": "added", "sensor": sensor}) assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - 
len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 0 ) - aioclient_mock.clear_requests() - data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} - mock_deconz_request(aioclient_mock, config_entry.data, data) + deconz_payload["sensors"]["0"] = sensor + mock_requests() await hass.services.async_call(DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH) await hass.async_block_till_done() @@ -701,11 +499,16 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_service_call( assert hass.states.get("binary_sensor.presence_sensor") +@pytest.mark.parametrize( + "config_entry_options", [{CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}] +) async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + deconz_payload: dict[str, Any], + mock_requests: Callable[[str], None], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new binary sensor is not allowed.""" sensor = { @@ -715,39 +518,27 @@ async def test_add_new_binary_sensor_ignored_load_entities_on_options_change( "config": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", } - event_added_sensor = { - "t": "event", - "e": "added", - "r": "sensors", - "id": "1", - "sensor": sensor, - } - - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_MASTER_GATEWAY: True, CONF_ALLOW_NEW_DEVICES: False}, - ) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"e": "added", "sensor": sensor}) assert len(hass.states.async_all()) == 0 assert not hass.states.get("binary_sensor.presence_sensor") assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 0 ) - aioclient_mock.clear_requests() - data = {"config": {}, "groups": {}, "lights": {}, "sensors": {"1": sensor}} - mock_deconz_request(aioclient_mock, config_entry.data, data) + deconz_payload["sensors"]["0"] = sensor + mock_requests() hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_NEW_DEVICES: True} + config_entry_setup, options={CONF_ALLOW_NEW_DEVICES: True} ) await hass.async_block_till_done() diff --git a/tests/components/deconz/test_button.py b/tests/components/deconz/test_button.py index 4d85270ddca..c649dba5b00 100644 --- a/tests/components/deconz/test_button.py +++ b/tests/components/deconz/test_button.py @@ -1,31 +1,22 @@ """deCONZ button platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import 
ConfigEntryFactoryType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_binary_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Store scene button { @@ -42,15 +33,7 @@ TEST_DATA = [ } }, { - "entity_count": 2, - "device_count": 3, "entity_id": "button.light_group_scene_store_current_scene", - "unique_id": "01234E56789A/groups/1/scenes/1-store", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "icon": "mdi:inbox-arrow-down", - "friendly_name": "Light group Scene Store Current Scene", - }, "request": "/groups/1/scenes/1/store", "request_data": {}, }, @@ -84,15 +67,7 @@ TEST_DATA = [ } }, { - "entity_count": 5, - "device_count": 3, "entity_id": "button.aqara_fp1_reset_presence", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-reset_presence", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "device_class": "restart", - "friendly_name": "Aqara FP1 Reset Presence", - }, "request": "/sensors/1/config", "request_data": {"resetpresence": True}, }, @@ -100,42 +75,24 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("deconz_payload", "expected"), TEST_DATA) async def test_button( hass: HomeAssistant, entity_registry: er.EntityRegistry, - device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of button entities.""" - with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - button = hass.states.get(expected["entity_id"]) - assert button.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.BUTTON]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify button press - mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( BUTTON_DOMAIN, @@ -144,14 +101,3 @@ async def test_button( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_climate.py b/tests/components/deconz/test_climate.py index 
0e51f31cec4..7f456e81976 100644 --- a/tests/components/deconz/test_climate.py +++ b/tests/components/deconz/test_climate.py @@ -1,8 +1,10 @@ """deCONZ climate platform tests.""" +from collections.abc import Callable from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.climate import ( ATTR_FAN_MODE, @@ -11,15 +13,10 @@ from homeassistant.components.climate import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, DOMAIN as CLIMATE_DOMAIN, - FAN_AUTO, - FAN_HIGH, - FAN_LOW, - FAN_MEDIUM, FAN_OFF, FAN_ON, PRESET_BOOST, PRESET_COMFORT, - PRESET_ECO, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -30,106 +27,74 @@ from homeassistant.components.climate import ( from homeassistant.components.deconz.climate import ( DECONZ_FAN_SMART, DECONZ_PRESET_AUTO, - DECONZ_PRESET_COMPLEX, - DECONZ_PRESET_HOLIDAY, DECONZ_PRESET_MANUAL, ) from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_TEMPERATURE, - STATE_OFF, - STATE_UNAVAILABLE, -) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, STATE_OFF, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no climate entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 59, + "displayflipped": None, + "heatsetpoint": 2100, + "locked": True, + "mountingmode": None, + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "6130553ac247174809bae47144ee23f8", + "lastseen": "2020-11-29T19:31Z", + "manufacturername": "Danfoss", + "modelid": "eTRV0100", + "name": "thermostat", + "state": { + "errorcode": None, + "lastupdated": "2020-11-29T19:28:40.665", + "mountingmodeactive": False, + "on": True, + "temperature": 2102, + "valve": 24, + "windowopen": "Closed", + }, + "swversion": "01.02.0008 01.02", + "type": "ZHAThermostat", + "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", + } + ], +) async def test_simple_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of climate entities. This is a simple water heater that only supports setting temperature and on and off. 
""" - data = { - "sensors": { - "0": { - "config": { - "battery": 59, - "displayflipped": None, - "heatsetpoint": 2100, - "locked": True, - "mountingmode": None, - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "6130553ac247174809bae47144ee23f8", - "lastseen": "2020-11-29T19:31Z", - "manufacturername": "Danfoss", - "modelid": "eTRV0100", - "name": "thermostat", - "state": { - "errorcode": None, - "lastupdated": "2020-11-29T19:28:40.665", - "mountingmodeactive": False, - "on": True, - "temperature": 2102, - "valve": 24, - "windowopen": "Closed", - }, - "swversion": "01.02.0008 01.02", - "type": "ZHAThermostat", - "uniqueid": "14:b4:57:ff:fe:d5:4e:77-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - ] - assert climate_thermostat.attributes["current_temperature"] == 21.0 - assert climate_thermostat.attributes["temperature"] == 21.0 - assert climate_thermostat.attributes["locked"] is True - assert hass.states.get("sensor.thermostat_battery").state == "59" - assert climate_thermostat.attributes["hvac_action"] == HVACAction.HEATING + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat configured off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -138,16 +103,7 @@ async def test_simple_climate_device( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": True}}) assert hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -156,7 +112,7 @@ async def test_simple_climate_device( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service turn on thermostat @@ -189,61 +145,40 @@ async def test_simple_climate_device( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) async def test_climate_device_without_cooling_support( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, 
+ snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "1": { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.AUTO - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - HVACMode.AUTO, - ] - assert climate_thermostat.attributes["current_temperature"] == 22.6 - assert climate_thermostat.attributes["temperature"] == 22.0 - assert hass.states.get("sensor.thermostat") is None - assert hass.states.get("sensor.thermostat_battery").state == "100" - assert hass.states.get("climate.presence_sensor") is None - assert hass.states.get("climate.clip_thermostat") is None - assert ( - hass.states.get("climate.thermostat").attributes["hvac_action"] - == HVACAction.HEATING - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat configured off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"mode": "off"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "off"}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -252,17 +187,7 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"mode": "other"}, - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "other"}, "state": {"on": True}}) assert hass.states.get("climate.thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -271,16 +196,7 @@ async def test_climate_device_without_cooling_support( # Event signals thermostat state off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == STATE_OFF assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -289,7 +205,7 @@ async def test_climate_device_without_cooling_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set HVAC mode to auto @@ -355,83 +271,53 @@ async def test_climate_device_without_cooling_support( blocking=True, ) - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert 
state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": 1111, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_cooling_support( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": 1111, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["hvac_modes"] == [ - HVACMode.HEAT, - HVACMode.OFF, - HVACMode.AUTO, - HVACMode.COOL, - ] - assert climate_thermostat.attributes["current_temperature"] == 23.2 - assert climate_thermostat.attributes["temperature"] == 22.2 - assert hass.states.get("sensor.zen_01_battery").state == "25" - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals thermostat mode cool - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"mode": "cool"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"mode": "cool"}}) assert hass.states.get("climate.zen_01").state == HVACMode.COOL assert hass.states.get("climate.zen_01").attributes["temperature"] == 11.1 assert ( @@ -440,16 +326,7 @@ async def test_climate_device_with_cooling_support( # Event signals thermostat state on - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": True}}) assert 
hass.states.get("climate.zen_01").state == HVACMode.COOL assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -458,7 +335,7 @@ async def test_climate_device_with_cooling_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set temperature to 20 @@ -471,71 +348,52 @@ async def test_climate_device_with_cooling_support( assert aioclient_mock.mock_calls[1][2] == {"coolsetpoint": 2000.0} +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": "auto", + "heatsetpoint": 2222, + "mode": "heat", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_fan_support( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": "auto", - "heatsetpoint": 2222, - "mode": "heat", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["fan_mode"] == FAN_AUTO - assert climate_thermostat.attributes["fan_modes"] == [ - DECONZ_FAN_SMART, - FAN_AUTO, - FAN_HIGH, - FAN_MEDIUM, - FAN_LOW, - FAN_ON, - FAN_OFF, - ] - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals fan mode defaults to off - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}}) assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_OFF assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE @@ -543,17 +401,7 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - event_changed_sensor = { - "t": "event", - 
"e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}, "state": {"on": True}}) assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -562,16 +410,7 @@ async def test_climate_device_with_fan_support( # Event signals unsupported fan mode - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"fanmode": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"fanmode": "unsupported"}}) assert hass.states.get("climate.zen_01").attributes["fan_mode"] == FAN_ON assert ( hass.states.get("climate.zen_01").attributes["hvac_action"] @@ -580,7 +419,7 @@ async def test_climate_device_with_fan_support( # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set fan mode to off @@ -613,75 +452,53 @@ async def test_climate_device_with_fan_support( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "coolsetpoint": None, + "fanmode": None, + "heatsetpoint": 2222, + "mode": "heat", + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": { + "lastupdated": "2020-11-27T13:42:40.863", + "on": False, + "temperature": 2320, + }, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", + } + ], +) async def test_climate_device_with_preset( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "coolsetpoint": None, - "fanmode": None, - "heatsetpoint": 2222, - "mode": "heat", - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": { - "lastupdated": "2020-11-27T13:42:40.863", - "on": False, - "temperature": 2320, - }, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - - climate_zen_01 = hass.states.get("climate.zen_01") - assert climate_zen_01.state == HVACMode.HEAT - assert climate_zen_01.attributes["current_temperature"] == 23.2 - assert climate_zen_01.attributes["temperature"] == 22.2 - assert climate_zen_01.attributes["preset_mode"] == DECONZ_PRESET_AUTO - assert climate_zen_01.attributes["preset_modes"] == [ - DECONZ_PRESET_AUTO, - PRESET_BOOST, - PRESET_COMFORT, - DECONZ_PRESET_COMPLEX, - PRESET_ECO, - 
DECONZ_PRESET_HOLIDAY, - DECONZ_PRESET_MANUAL, - ] - assert ( - hass.states.get("climate.zen_01").attributes["hvac_action"] == HVACAction.IDLE - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals deCONZ preset - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "manual"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"preset": "manual"}}) assert ( hass.states.get("climate.zen_01").attributes["preset_mode"] == DECONZ_PRESET_MANUAL @@ -689,21 +506,12 @@ async def test_climate_device_with_preset( # Event signals unknown preset - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "unsupported"}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"config": {"preset": "unsupported"}}) assert hass.states.get("climate.zen_01").attributes["preset_mode"] is None # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set preset to HASS preset @@ -736,12 +544,10 @@ async def test_climate_device_with_preset( ) -async def test_clip_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test successful creation of sensor entities.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Thermostat", "type": "ZHAThermostat", @@ -763,18 +569,19 @@ async def test_clip_climate_device( "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) - - assert len(hass.states.async_all()) == 3 - assert hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT - assert ( - hass.states.get("climate.clip_thermostat").attributes["hvac_action"] - == HVACAction.HEATING - ) + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_clip_climate_device( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Test successful creation of sensor entities.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CLIMATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Disallow clip sensors @@ -783,7 +590,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 2 + assert len(hass.states.async_all()) == 1 assert not hass.states.get("climate.clip_thermostat") # Allow clip sensors @@ -793,7 +600,7 @@ async def test_clip_climate_device( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == 3 + assert len(hass.states.async_all()) == 2 assert hass.states.get("climate.clip_thermostat").state == HVACMode.HEAT assert ( hass.states.get("climate.clip_thermostat").attributes["hvac_action"] @@ -801,46 +608,37 @@ async def test_clip_climate_device( ) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + 
"name": "Thermostat", + "type": "ZHAThermostat", + "state": {"on": True, "temperature": 2260, "valve": 30}, + "config": { + "battery": 100, + "heatsetpoint": 2200, + "mode": "auto", + "offset": 10, + "reachable": True, + }, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_verify_state_update( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that state update properly.""" - data = { - "sensors": { - "1": { - "name": "Thermostat", - "type": "ZHAThermostat", - "state": {"on": True, "temperature": 2260, "valve": 30}, - "config": { - "battery": 100, - "heatsetpoint": 2200, - "mode": "auto", - "offset": 10, - "reachable": True, - }, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] == HVACAction.HEATING ) - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"on": False}}) assert hass.states.get("climate.thermostat").state == HVACMode.AUTO assert ( hass.states.get("climate.thermostat").attributes["hvac_action"] @@ -848,15 +646,14 @@ async def test_verify_state_update( ) +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_climate_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new climate device works.""" event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Thermostat id", "name": "Thermostat", @@ -873,11 +670,9 @@ async def test_add_new_climate_device( }, } - await setup_deconz_integration(hass, aioclient_mock) assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 2 assert hass.states.get("climate.thermostat").state == HVACMode.AUTO @@ -888,141 +683,115 @@ async def test_add_new_climate_device( ) -async def test_not_allow_clip_thermostat( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP thermostat sensor", + "type": "CLIPThermostat", + "state": {}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + }, + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_thermostat(hass: HomeAssistant) -> None: """Test that CLIP thermostats are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP thermostat sensor", - "type": "CLIPThermostat", - "state": {}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - assert len(hass.states.async_all()) == 0 -async def test_no_mode_no_state( - hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that a climate device without mode and state works.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 25, - "heatsetpoint": 2222, - "mode": None, - "preset": "auto", - "offset": 0, - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "074549903686a77a12ef0f06c499b1ef", - "lastseen": "2020-11-27T13:45Z", - "manufacturername": "Zen Within", - "modelid": "Zen-01", - "name": "Zen-01", - "state": {"lastupdated": "none", "on": None, "temperature": 2290}, - "type": "ZHAThermostat", - "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", - } +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 25, + "heatsetpoint": 2222, + "mode": None, + "preset": "auto", + "offset": 0, + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "074549903686a77a12ef0f06c499b1ef", + "lastseen": "2020-11-27T13:45Z", + "manufacturername": "Zen Within", + "modelid": "Zen-01", + "name": "Zen-01", + "state": {"lastupdated": "none", "on": None, "temperature": 2290}, + "type": "ZHAThermostat", + "uniqueid": "00:24:46:00:00:11:6f:56-01-0201", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_no_mode_no_state(hass: HomeAssistant) -> None: + """Test that a climate device without mode and state works.""" assert len(hass.states.async_all()) == 2 climate_thermostat = hass.states.get("climate.zen_01") - assert climate_thermostat.state is STATE_OFF assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_AUTO assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE - # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 58, + "heatsetpoint": 2200, + "locked": False, + "mode": "heat", + "offset": -200, + "on": True, + "preset": "manual", + "reachable": True, + "schedule": {}, + "schedule_on": False, + "setvalve": False, + "windowopen_set": False, + }, + "ep": 1, + "etag": "404c15db68c318ebe7832ce5aa3d1e30", + "lastannounced": "2022-08-31T03:00:59Z", + "lastseen": "2022-09-19T11:58Z", + "manufacturername": "_TZE200_b6wax7g0", + "modelid": "TS0601", + "name": "Thermostat", + "state": { + "lastupdated": "2022-09-19T11:58:24.204", + "lowbattery": False, + "on": False, + "temperature": 2200, + "valve": 0, + }, + "type": "ZHAThermostat", + "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_boost_mode( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that a climate device with boost mode and different state works.""" - data = { - "sensors": { - "0": { - "config": { - "battery": 58, - "heatsetpoint": 2200, - "locked": False, - "mode": "heat", - "offset": -200, - "on": True, - "preset": "manual", - "reachable": True, - "schedule": {}, - "schedule_on": False, - "setvalve": False, - "windowopen_set": False, - }, - "ep": 1, - "etag": "404c15db68c318ebe7832ce5aa3d1e30", - "lastannounced": "2022-08-31T03:00:59Z", - "lastseen": "2022-09-19T11:58Z", - "manufacturername": "_TZE200_b6wax7g0", - "modelid": "TS0601", - "name": "Thermostat", - "state": { - "lastupdated": "2022-09-19T11:58:24.204", - "lowbattery": False, - "on": False, - "temperature": 2200, - 
"valve": 0, - }, - "type": "ZHAThermostat", - "uniqueid": "84:fd:27:ff:fe:8a:eb:89-01-0201", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 3 climate_thermostat = hass.states.get("climate.thermostat") - assert climate_thermostat.state == HVACMode.HEAT - assert climate_thermostat.attributes["preset_mode"] is DECONZ_PRESET_MANUAL assert climate_thermostat.attributes["hvac_action"] is HVACAction.IDLE # Event signals thermostat preset boost and valve 100 (real data) - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - "config": {"preset": "boost"}, - "state": {"valve": 100}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"config": {"preset": "boost"}, "state": {"valve": 100}}) climate_thermostat = hass.states.get("climate.thermostat") assert climate_thermostat.attributes["preset_mode"] is PRESET_BOOST assert climate_thermostat.attributes["hvac_action"] is HVACAction.HEATING - - # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 6da940e0918..49711962407 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -32,8 +32,9 @@ from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .test_gateway import API_KEY, BRIDGEID, setup_deconz_integration +from .conftest import API_KEY, BRIDGE_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker BAD_BRIDGEID = "0000000000000000" @@ -47,7 +48,7 @@ async def test_flow_discovered_bridges( aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[ - {"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}, + {"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}, {"id": "1234E567890A", "internalipaddress": "5.6.7.8", "internalport": 80}, ], headers={"content-type": CONTENT_TYPE_JSON}, @@ -78,7 +79,7 @@ async def test_flow_discovered_bridges( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -92,7 +93,7 @@ async def test_flow_manual_configuration_decision( """Test that config flow for one discovered bridge works.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -123,7 +124,7 @@ async def test_flow_manual_configuration_decision( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -132,7 +133,7 @@ async def test_flow_manual_configuration_decision( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -174,7 +175,7 @@ async def test_flow_manual_configuration( aioclient_mock.get( 
f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -183,7 +184,7 @@ async def test_flow_manual_configuration( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -222,11 +223,11 @@ async def test_manual_configuration_after_discovery_ResponseError( async def test_manual_configuration_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Test that manual configuration can update existing config entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -256,7 +257,7 @@ async def test_manual_configuration_update_configuration( aioclient_mock.get( f"http://2.3.4.5:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -266,15 +267,14 @@ async def test_manual_configuration_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" +@pytest.mark.usefixtures("config_entry_setup") async def test_manual_configuration_dont_update_configuration( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that _create_entry work and that bridgeid can be requested.""" - await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.get( pydeconz.utils.URL_DISCOVER, json=[], @@ -304,7 +304,7 @@ async def test_manual_configuration_dont_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{API_KEY}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -367,12 +367,15 @@ async def test_manual_configuration_timeout_get_bridge( ], ) async def test_link_step_fails( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, raised_error, error_string + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + raised_error: Exception, + error_string: str, ) -> None: """Test config flow should abort if no API key was possible to retrieve.""" aioclient_mock.get( pydeconz.utils.URL_DISCOVER, - json=[{"id": BRIDGEID, "internalipaddress": "1.2.3.4", "internalport": 80}], + json=[{"id": BRIDGE_ID, "internalipaddress": "1.2.3.4", "internalport": 80}], headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -399,14 +402,14 @@ async def test_link_step_fails( async def test_reauth_flow_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Verify reauth flow can update gateway API key.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, - data=config_entry.data, + data=config_entry_setup.data, context={"source": SOURCE_REAUTH}, ) @@ -423,7 +426,7 @@ async def test_reauth_flow_update_configuration( aioclient_mock.get( f"http://1.2.3.4:80/api/{new_api_key}/config", - json={"bridgeid": BRIDGEID}, + json={"bridgeid": BRIDGE_ID}, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -433,7 
+436,7 @@ async def test_reauth_flow_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_API_KEY] == new_api_key + assert config_entry_setup.data[CONF_API_KEY] == new_api_key async def test_flow_ssdp_discovery( @@ -448,7 +451,7 @@ async def test_flow_ssdp_discovery( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -472,7 +475,7 @@ async def test_flow_ssdp_discovery( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == BRIDGEID + assert result["title"] == BRIDGE_ID assert result["data"] == { CONF_HOST: "1.2.3.4", CONF_PORT: 80, @@ -481,11 +484,9 @@ async def test_flow_ssdp_discovery( async def test_ssdp_discovery_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test if a discovered bridge is configured but updates with new attributes.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -498,7 +499,7 @@ async def test_ssdp_discovery_update_configuration( ssdp_location="http://2.3.4.5:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -507,15 +508,14 @@ async def test_ssdp_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" assert len(mock_setup_entry.mock_calls) == 1 async def test_ssdp_discovery_dont_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test if a discovered bridge has already been configured.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, @@ -525,7 +525,7 @@ async def test_ssdp_discovery_dont_update_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -533,17 +533,14 @@ async def test_ssdp_discovery_dont_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" +@pytest.mark.parametrize("config_entry_source", [SOURCE_HASSIO]) async def test_ssdp_discovery_dont_update_existing_hassio_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test to ensure the SSDP discovery does not update an Hass.io entry.""" - config_entry = await setup_deconz_integration( - hass, aioclient_mock, source=SOURCE_HASSIO - ) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=ssdp.SsdpServiceInfo( @@ -552,7 +549,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( ssdp_location="http://1.2.3.4:80/", upnp={ ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: 
BRIDGEID, + ATTR_UPNP_SERIAL: BRIDGE_ID, }, ), context={"source": SOURCE_SSDP}, @@ -560,7 +557,7 @@ async def test_ssdp_discovery_dont_update_existing_hassio_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry_setup.data[CONF_HOST] == "1.2.3.4" async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: @@ -572,7 +569,7 @@ async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: "addon": "Mock Addon", CONF_HOST: "mock-deconz", CONF_PORT: 80, - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, CONF_API_KEY: API_KEY, }, name="Mock Addon", @@ -610,11 +607,10 @@ async def test_flow_hassio_discovery(hass: HomeAssistant) -> None: async def test_hassio_discovery_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, ) -> None: """Test we can update an existing config entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - with patch( "homeassistant.components.deconz.async_setup_entry", return_value=True, @@ -626,7 +622,7 @@ async def test_hassio_discovery_update_configuration( CONF_HOST: "2.3.4.5", CONF_PORT: 8080, CONF_API_KEY: "updated", - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, }, name="Mock Addon", slug="deconz", @@ -638,18 +634,15 @@ async def test_hassio_discovery_update_configuration( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data[CONF_HOST] == "2.3.4.5" - assert config_entry.data[CONF_PORT] == 8080 - assert config_entry.data[CONF_API_KEY] == "updated" + assert config_entry_setup.data[CONF_HOST] == "2.3.4.5" + assert config_entry_setup.data[CONF_PORT] == 8080 + assert config_entry_setup.data[CONF_API_KEY] == "updated" assert len(mock_setup_entry.mock_calls) == 1 -async def test_hassio_discovery_dont_update_configuration( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_hassio_discovery_dont_update_configuration(hass: HomeAssistant) -> None: """Test we can update an existing config entry.""" - await setup_deconz_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( DECONZ_DOMAIN, data=HassioServiceInfo( @@ -657,7 +650,7 @@ async def test_hassio_discovery_dont_update_configuration( CONF_HOST: "1.2.3.4", CONF_PORT: 80, CONF_API_KEY: API_KEY, - CONF_SERIAL: BRIDGEID, + CONF_SERIAL: BRIDGE_ID, }, name="Mock Addon", slug="deconz", @@ -671,12 +664,10 @@ async def test_hassio_discovery_dont_update_configuration( async def test_option_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test config flow options.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) + result = await hass.config_entries.options.async_init(config_entry_setup.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "deconz_devices" diff --git a/tests/components/deconz/test_cover.py b/tests/components/deconz/test_cover.py index 69452c3285e..f1573394fae 100644 --- a/tests/components/deconz/test_cover.py +++ b/tests/components/deconz/test_cover.py @@ -1,10 +1,13 @@ """deCONZ cover platform tests.""" +from collections.abc import Callable from unittest.mock 
import patch +import pytest +from syrupy import SnapshotAssertion + from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, - ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN as COVER_DOMAIN, @@ -17,80 +20,59 @@ from homeassistant.components.cover import ( SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_CLOSED, - STATE_OPEN, - STATE_UNAVAILABLE, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OPEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_covers( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no cover entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_cover( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported cover entities are created.""" - data = { - "lights": { - "1": { +@pytest.mark.parametrize( + "light_payload", + [ + { + "0": { "name": "Window covering device", "type": "Window covering device", "state": {"lift": 100, "open": False, "reachable": True}, "modelid": "lumi.curtain", "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "2": { + "1": { "name": "Unsupported cover", "type": "Not a cover", "state": {"reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 - cover = hass.states.get("cover.window_covering_device") - assert cover.state == STATE_CLOSED - assert cover.attributes[ATTR_CURRENT_POSITION] == 0 - assert not hass.states.get("cover.unsupported_cover") + ], +) +async def test_cover( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, + snapshot: SnapshotAssertion, +) -> None: + """Test that all supported cover entities are created.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Event signals cover is open - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"lift": 0, "open": True}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"lift": 0, "open": True}}) cover = hass.states.get("cover.window_covering_device") assert cover.state == STATE_OPEN assert cover.attributes[ATTR_CURRENT_POSITION] == 100 # Verify service calls for cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service open cover @@ -132,56 +114,46 @@ async def test_cover( ) assert aioclient_mock.mock_calls[4][2] == {"stop": True} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - 
for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "87269755b9b3a046485fdae8d96b252c", + "lastannounced": None, + "lastseen": "2020-08-01T16:22:05Z", + "manufacturername": "AXIS", + "modelid": "Gear", + "name": "Covering device", + "state": { + "bri": 0, + "lift": 0, + "on": False, + "open": True, + "reachable": True, + "tilt": 0, + }, + "swversion": "100-5.3.5.1122", + "type": "Window covering device", + "uniqueid": "00:24:46:00:00:12:34:56-01", + } + ], +) async def test_tilt_cover( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + snapshot: SnapshotAssertion, ) -> None: """Test that tilting a cover works.""" - data = { - "lights": { - "0": { - "etag": "87269755b9b3a046485fdae8d96b252c", - "lastannounced": None, - "lastseen": "2020-08-01T16:22:05Z", - "manufacturername": "AXIS", - "modelid": "Gear", - "name": "Covering device", - "state": { - "bri": 0, - "lift": 0, - "on": False, - "open": True, - "reachable": True, - "tilt": 0, - }, - "swversion": "100-5.3.5.1122", - "type": "Window covering device", - "uniqueid": "00:24:46:00:00:12:34:56-01", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 1 - covering_device = hass.states.get("cover.covering_device") - assert covering_device.state == STATE_OPEN - assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 100 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify service calls for tilting cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service set tilt cover @@ -224,44 +196,45 @@ async def test_tilt_cover( assert aioclient_mock.mock_calls[4][2] == {"stop": True} +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "4cefc909134c8e99086b55273c2bde67", + "hascolor": False, + "lastannounced": "2022-08-08T12:06:18Z", + "lastseen": "2022-08-14T14:22Z", + "manufacturername": "Keen Home Inc", + "modelid": "SV01-410-MP-1.0", + "name": "Vent", + "state": { + "alert": "none", + "bri": 242, + "on": False, + "reachable": True, + "sat": 10, + }, + "swversion": "0x00000012", + "type": "Level controllable output", + "uniqueid": "00:22:a3:00:00:00:00:00-01", + } + ], +) async def test_level_controllable_output_cover( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + snapshot: SnapshotAssertion, ) -> None: """Test that tilting a cover works.""" - data = { - "lights": { - "0": { - "etag": "4cefc909134c8e99086b55273c2bde67", - "hascolor": False, - "lastannounced": "2022-08-08T12:06:18Z", - "lastseen": "2022-08-14T14:22Z", - "manufacturername": "Keen Home Inc", - "modelid": "SV01-410-MP-1.0", - "name": "Vent", - "state": { - "alert": "none", - "bri": 242, - "on": False, - "reachable": True, - 
"sat": 10, - }, - "swversion": "0x00000012", - "type": "Level controllable output", - "uniqueid": "00:22:a3:00:00:00:00:00-01", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 1 - covering_device = hass.states.get("cover.vent") - assert covering_device.state == STATE_OPEN - assert covering_device.attributes[ATTR_CURRENT_TILT_POSITION] == 97 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.COVER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify service calls for tilting cover - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service open cover diff --git a/tests/components/deconz/test_deconz_event.py b/tests/components/deconz/test_deconz_event.py index 1193f348e38..8bf7bb146d1 100644 --- a/tests/components/deconz/test_deconz_event.py +++ b/tests/components/deconz/test_deconz_event.py @@ -1,12 +1,11 @@ """Test deCONZ remote events.""" -from unittest.mock import patch - from pydeconz.models.sensor.ancillary_control import ( AncillaryControlAction, AncillaryControlPanel, ) from pydeconz.models.sensor.presence import PresenceStatePresenceEvent +import pytest from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -18,31 +17,19 @@ from homeassistant.components.deconz.deconz_event import ( CONF_DECONZ_RELATIVE_ROTARY_EVENT, RELATIVE_ROTARY_DECONZ_TO_EVENT, ) -from homeassistant.const import ( - CONF_DEVICE_ID, - CONF_EVENT, - CONF_ID, - CONF_UNIQUE_ID, - STATE_UNAVAILABLE, -) +from homeassistant.const import CONF_DEVICE_ID, CONF_EVENT, CONF_ID, CONF_UNIQUE_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration +from .conftest import WebsocketDataType -from tests.common import async_capture_events -from tests.test_util.aiohttp import AiohttpClientMocker +from tests.common import MockConfigEntry, async_capture_events -async def test_deconz_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, -) -> None: - """Test successful creation of deconz events.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -79,14 +66,23 @@ async def test_deconz_events( "uniqueid": "00:00:00:00:00:00:00:05-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +async def test_deconz_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, +) -> None: + """Test successful creation of deconz events.""" assert len(hass.states.async_all()) == 3 # 5 switches + 2 additional devices for deconz service and host assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 7 ) assert hass.states.get("sensor.switch_2_battery").state == "100" @@ -95,15 +91,7 @@ async def test_deconz_events( captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) - 
event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"buttonevent": 2000}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "1", "state": {"buttonevent": 2000}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -117,15 +105,7 @@ async def test_deconz_events( "device_id": device.id, } - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "3", - "state": {"buttonevent": 2000}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "3", "state": {"buttonevent": 2000}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:03")} @@ -140,15 +120,7 @@ async def test_deconz_events( "device_id": device.id, } - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "4", - "state": {"gesture": 0}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"id": "4", "state": {"gesture": 0}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:04")} @@ -164,14 +136,10 @@ async def test_deconz_events( } event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", "id": "5", "state": {"buttonevent": 6002, "angle": 110, "xy": [0.5982, 0.3897]}, } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_changed_sensor) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:05")} @@ -189,39 +157,14 @@ async def test_deconz_events( # Unsupported event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "name": "other name", - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "1", "name": "other name"}) assert len(captured_events) == 4 - await hass.config_entries.async_unload(config_entry.entry_id) - states = hass.states.async_all() - assert len(hass.states.async_all()) == 3 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_deconz_alarm_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, -) -> None: - """Test successful creation of deconz alarm events.""" - data = { - "alarmsystems": { +@pytest.mark.parametrize( + "alarm_system_payload", + [ + { "0": { "name": "default", "config": { @@ -248,43 +191,55 @@ async def test_deconz_alarm_events( }, }, } - }, - "sensors": { - "1": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "invalid_code", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - }, - } - with 
patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "invalid_code", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +async def test_deconz_alarm_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, +) -> None: + """Test successful creation of deconz alarm events.""" assert len(hass.states.async_all()) == 4 # 1 alarm control device + 2 additional devices for deconz service and host assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -292,15 +247,7 @@ async def test_deconz_alarm_events( # Emergency event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.EMERGENCY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.EMERGENCY}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -316,15 +263,7 @@ async def test_deconz_alarm_events( # Fire event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.FIRE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.FIRE}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -340,15 +279,7 @@ async def test_deconz_alarm_events( # Invalid code event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.INVALID_CODE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.INVALID_CODE}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -364,15 +295,7 @@ async def test_deconz_alarm_events( # Panic event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": AncillaryControlAction.PANIC}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"action": AncillaryControlAction.PANIC}}) device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:01")} @@ -388,84 +311,57 @@ async def test_deconz_alarm_events( # Only care for changes to specific action events - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"action": 
AncillaryControlAction.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"action": AncillaryControlAction.ARMED_AWAY}}) assert len(captured_events) == 4 # Only care for action events - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"panel": AncillaryControlPanel.ARMED_AWAY}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"panel": AncillaryControlPanel.ARMED_AWAY}}) assert len(captured_events) == 4 - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(hass.states.async_all()) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", + } + ], +) async def test_deconz_presence_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, ) -> None: """Test successful creation of deconz presence events.""" - data = { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 5 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -485,15 +381,7 @@ async def test_deconz_presence_events( PresenceStatePresenceEvent.LEFT_LEAVE, PresenceStatePresenceEvent.RIGHT_LEAVE, ): - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"presenceevent": presence_event}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data({"state": {"presenceevent": presence_event}}) assert len(captured_events) == 1 assert captured_events[0].data == { @@ -506,69 +394,51 @@ async def test_deconz_presence_events( # Unsupported presence event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - 
"state": {"presenceevent": PresenceStatePresenceEvent.NINE}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"presenceevent": PresenceStatePresenceEvent.NINE}}) assert len(captured_events) == 0 - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(hass.states.async_all()) == 5 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 100, + "on": True, + "reachable": True, + }, + "etag": "463728970bdb7d04048fc4373654f45a", + "lastannounced": "2022-07-03T13:57:59Z", + "lastseen": "2022-07-03T14:02Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "RDM002", + "name": "RDM002 44", + "state": { + "expectedeventduration": 400, + "expectedrotation": 75, + "lastupdated": "2022-07-03T11:37:49.586", + "rotaryevent": 2, + }, + "swversion": "2.59.19", + "type": "ZHARelativeRotary", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", + } + ], +) async def test_deconz_relative_rotary_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + sensor_ws_data: WebsocketDataType, ) -> None: """Test successful creation of deconz relative rotary events.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 100, - "on": True, - "reachable": True, - }, - "etag": "463728970bdb7d04048fc4373654f45a", - "lastannounced": "2022-07-03T13:57:59Z", - "lastseen": "2022-07-03T14:02Z", - "manufacturername": "Signify Netherlands B.V.", - "modelid": "RDM002", - "name": "RDM002 44", - "state": { - "expectedeventduration": 400, - "expectedrotation": 75, - "lastupdated": "2022-07-03T11:37:49.586", - "rotaryevent": 2, - }, - "swversion": "2.59.19", - "type": "ZHARelativeRotary", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-14-fc00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 3 ) @@ -580,18 +450,13 @@ async def test_deconz_relative_rotary_events( for rotary_event, duration, rotation in ((1, 100, 50), (2, 200, -50)): event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", "state": { "rotaryevent": rotary_event, "expectedeventduration": duration, "expectedrotation": rotation, - }, + } } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(event_changed_sensor) assert len(captured_events) == 1 assert captured_events[0].data == { @@ -606,38 +471,14 @@ async def test_deconz_relative_rotary_events( # Unsupported relative rotary event - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "name": "123", - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"name": "123"}) assert len(captured_events) == 0 - await hass.config_entries.async_unload(config_entry.entry_id) - states = hass.states.async_all() - assert 
len(hass.states.async_all()) == 1 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_deconz_events_bad_unique_id( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - aioclient_mock: AiohttpClientMocker, -) -> None: - """Verify no devices are created if unique id is bad or missing.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1 no unique id", "type": "ZHASwitch", @@ -652,12 +493,20 @@ async def test_deconz_events_bad_unique_id( "uniqueid": "00:00-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +async def test_deconz_events_bad_unique_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, +) -> None: + """Verify no devices are created if unique id is bad or missing.""" assert len(hass.states.async_all()) == 1 assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) + len( + dr.async_entries_for_config_entry( + device_registry, config_entry_setup.entry_id + ) + ) == 2 ) diff --git a/tests/components/deconz/test_device_trigger.py b/tests/components/deconz/test_device_trigger.py index 54b735ba021..6f74db0b82c 100644 --- a/tests/components/deconz/test_device_trigger.py +++ b/tests/components/deconz/test_device_trigger.py @@ -1,6 +1,6 @@ """deCONZ device automation tests.""" -from unittest.mock import Mock, patch +from unittest.mock import Mock import pytest from pytest_unordered import unordered @@ -32,10 +32,9 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.trigger import async_initialize_triggers from homeassistant.setup import async_setup_component -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration +from .conftest import WebsocketDataType -from tests.common import async_get_device_automations, async_mock_service -from tests.test_util.aiohttp import AiohttpClientMocker +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -43,45 +42,37 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def automation_calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track automation calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "battery": 60, - "group": 
"10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -155,46 +146,44 @@ async def test_get_triggers( assert triggers == unordered(expected_triggers) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 95, + "enrolled": 1, + "on": True, + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "5aaa1c6bae8501f59929539c6e8f44d6", + "lastseen": "2021-07-25T18:07Z", + "manufacturername": "lk", + "modelid": "ZB-KeypadGeneric-D0002", + "name": "Keypad", + "state": { + "action": "armed_stay", + "lastupdated": "2021-07-25T18:02:51.172", + "lowbattery": False, + "panel": "exit_delay", + "seconds_remaining": 55, + "tampered": False, + }, + "swversion": "3.13", + "type": "ZHAAncillaryControl", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_for_alarm_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, ) -> None: """Test triggers work.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 95, - "enrolled": 1, - "on": True, - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "5aaa1c6bae8501f59929539c6e8f44d6", - "lastseen": "2021-07-25T18:07Z", - "manufacturername": "lk", - "modelid": "ZB-KeypadGeneric-D0002", - "name": "Keypad", - "state": { - "action": "armed_stay", - "lastupdated": "2021-07-25T18:02:51.172", - "lowbattery": False, - "panel": "exit_delay", - "seconds_remaining": 55, - "tampered": False, - }, - "swversion": "3.13", - "type": "ZHAAncillaryControl", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "00:00:00:00:00:00:00:00")} ) @@ -252,37 +241,34 @@ async def test_get_triggers_for_alarm_event( assert triggers == unordered(expected_triggers) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "Unsupported model", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_get_triggers_manage_unsupported_remotes( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, + hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: """Verify no triggers for an unsupported remote.""" - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": 
"1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "Unsupported model", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -296,41 +282,38 @@ async def test_get_triggers_manage_unsupported_remotes( assert triggers == unordered(expected_triggers) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "alert": "none", + "battery": 60, + "group": "10", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "1b355c0b6d2af28febd7ca9165881952", + "manufacturername": "IKEA of Sweden", + "mode": 1, + "modelid": "TRADFRI on/off switch", + "name": "TRÅDFRI on/off switch ", + "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, + "swversion": "1.4.018", + "type": "ZHASwitch", + "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_functional_device_trigger( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - automation_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], + sensor_ws_data: WebsocketDataType, ) -> None: """Test proper matching and attachment of device trigger automation.""" - - data = { - "sensors": { - "1": { - "config": { - "alert": "none", - "battery": 60, - "group": "10", - "on": True, - "reachable": True, - }, - "ep": 1, - "etag": "1b355c0b6d2af28febd7ca9165881952", - "manufacturername": "IKEA of Sweden", - "mode": 1, - "modelid": "TRADFRI on/off switch", - "name": "TRÅDFRI on/off switch ", - "state": {"buttonevent": 2002, "lastupdated": "2019-09-07T07:39:39"}, - "swversion": "1.4.018", - "type": "ZHASwitch", - "uniqueid": "d0:cf:5e:ff:fe:71:a4:3a-01-1000", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")} ) @@ -359,27 +342,16 @@ async def test_functional_device_trigger( assert len(hass.states.async_entity_ids(AUTOMATION_DOMAIN)) == 1 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"buttonevent": 1002}, - } - await mock_deconz_websocket(data=event_changed_sensor) + await sensor_ws_data({"state": {"buttonevent": 1002}}) await hass.async_block_till_done() - - assert len(automation_calls) == 1 - assert automation_calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" @pytest.mark.skip(reason="Temporarily disabled until automation validation is improved") -async def test_validate_trigger_unknown_device( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_validate_trigger_unknown_device(hass: HomeAssistant) -> None: """Test unknown device does not return a trigger config.""" - await setup_deconz_integration(hass, aioclient_mock) - assert await async_setup_component( hass, AUTOMATION_DOMAIN, @@ -408,14 +380,12 @@ async def test_validate_trigger_unknown_device( async def 
test_validate_trigger_unsupported_device( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported device doesn't return a trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="unsupported", ) @@ -450,14 +420,12 @@ async def test_validate_trigger_unsupported_device( async def test_validate_trigger_unsupported_trigger( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test unsupported trigger does not return a trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, model="TRADFRI on/off switch", ) @@ -494,14 +462,12 @@ async def test_validate_trigger_unsupported_trigger( async def test_attach_trigger_no_matching_event( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + config_entry_setup: MockConfigEntry, ) -> None: """Test no matching event for device doesn't return a trigger config.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, identifiers={(DECONZ_DOMAIN, "d0:cf:5e:ff:fe:71:a4:3a")}, name="Tradfri switch", model="TRADFRI on/off switch", diff --git a/tests/components/deconz/test_diagnostics.py b/tests/components/deconz/test_diagnostics.py index bfbc27b206d..2abc6d83995 100644 --- a/tests/components/deconz/test_diagnostics.py +++ b/tests/components/deconz/test_diagnostics.py @@ -2,30 +2,28 @@ from pydeconz.websocket import State from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant -from .test_gateway import setup_deconz_integration +from .conftest import WebsocketStateType +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateType, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - await mock_deconz_websocket(state=State.RUNNING) + await mock_websocket_state(State.RUNNING) await hass.async_block_till_done() - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/deconz/test_fan.py b/tests/components/deconz/test_fan.py index 5da0398c3e6..21809a138c6 100644 --- a/tests/components/deconz/test_fan.py +++ b/tests/components/deconz/test_fan.py @@ -1,9 +1,10 
@@ """deCONZ fan platform tests.""" +from collections.abc import Callable from unittest.mock import patch import pytest -from voluptuous.error import MultipleInvalid +from syrupy import SnapshotAssertion from homeassistant.components.fan import ( ATTR_PERCENTAGE, @@ -12,129 +13,67 @@ from homeassistant.components.fan import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_fans( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no fan entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "432f3de28965052961a99e3c5494daf4", + "hascolor": False, + "manufacturername": "King Of Fans, Inc.", + "modelid": "HDC52EastwindFan", + "name": "Ceiling fan", + "state": { + "alert": "none", + "bri": 254, + "on": False, + "reachable": True, + "speed": 4, + }, + "swversion": "0000000F", + "type": "Fan", + "uniqueid": "00:22:a3:00:00:27:8b:81-01", + } + ], +) async def test_fans( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, ) -> None: """Test that all supported fan entities are created.""" - data = { - "lights": { - "1": { - "etag": "432f3de28965052961a99e3c5494daf4", - "hascolor": False, - "manufacturername": "King Of Fans, Inc.", - "modelid": "HDC52EastwindFan", - "name": "Ceiling fan", - "state": { - "alert": "none", - "bri": 254, - "on": False, - "reachable": True, - "speed": 4, - }, - "swversion": "0000000F", - "type": "Fan", - "uniqueid": "00:22:a3:00:00:27:8b:81-01", - } - } - } + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.FAN]): + config_entry = await config_entry_factory() - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 # Light and fan - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Test states - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 1}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 2}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert 
hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 4}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 0}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + for speed, percent in (1, 25), (2, 50), (3, 75), (4, 100): + await light_ws_data({"state": {"speed": speed}}) + assert hass.states.get("fan.ceiling_fan").state == STATE_ON + assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == percent + await light_ws_data({"state": {"speed": 0}}) assert hass.states.get("fan.ceiling_fan").state == STATE_OFF assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 # Test service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service turn on fan using saved default_on_speed @@ -166,323 +105,20 @@ async def test_fans( ) assert aioclient_mock.mock_calls[3][2] == {"speed": 1} - # Service set fan percentage to 20% + # Service set fan percentage - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 20}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"speed": 1} - - # Service set fan percentage to 40% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 40}, - blocking=True, - ) - assert aioclient_mock.mock_calls[5][2] == {"speed": 2} - - # Service set fan percentage to 60% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 60}, - blocking=True, - ) - assert aioclient_mock.mock_calls[6][2] == {"speed": 3} - - # Service set fan percentage to 80% - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 80}, - blocking=True, - ) - assert aioclient_mock.mock_calls[7][2] == {"speed": 4} - - # Service set fan percentage to 0% does not equal off - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[8][2] == {"speed": 0} - - # Events with an unsupported speed does not get converted - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 5}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert not hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] - - await 
hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_fans_legacy_speed_modes( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported fan entities are created. - - Legacy fan support. - """ - data = { - "lights": { - "1": { - "etag": "432f3de28965052961a99e3c5494daf4", - "hascolor": False, - "manufacturername": "King Of Fans, Inc.", - "modelid": "HDC52EastwindFan", - "name": "Ceiling fan", - "state": { - "alert": "none", - "bri": 254, - "on": False, - "reachable": True, - "speed": 4, - }, - "swversion": "0000000F", - "type": "Fan", - "uniqueid": "00:22:a3:00:00:27:8b:81-01", - } - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 # Light and fan - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - - # Test states - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 1}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 25 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 2}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 50 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 4}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 100 - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 0}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - - assert hass.states.get("fan.ceiling_fan").state == STATE_OFF - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 0 - - # Test service calls - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") - - # Service turn on fan using saved default_on_speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan"}, - blocking=True, - ) - assert aioclient_mock.mock_calls[1][2] == {"speed": 4} - - # Service turn on fan with speed_off - # async_turn_on_compat use speed_to_percentage which will return 0 - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: 
"fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[2][2] == {"speed": 0} - - # Service turn on fan with bad speed - # async_turn_on_compat use speed_to_percentage which will convert to SPEED_MEDIUM -> 2 - - with pytest.raises(MultipleInvalid): - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad"}, - blocking=True, - ) - - # Service turn on fan to low speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, - blocking=True, - ) - assert aioclient_mock.mock_calls[3][2] == {"speed": 1} - - # Service turn on fan to medium speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, - blocking=True, - ) - assert aioclient_mock.mock_calls[4][2] == {"speed": 2} - - # Service turn on fan to high speed - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, - blocking=True, - ) - assert aioclient_mock.mock_calls[5][2] == {"speed": 4} - - # Service set fan speed to low - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 25}, - blocking=True, - ) - assert aioclient_mock.mock_calls[6][2] == {"speed": 1} - - # Service set fan speed to medium - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 50}, - blocking=True, - ) - assert aioclient_mock.mock_calls[7][2] == {"speed": 2} - - # Service set fan speed to high - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 100}, - blocking=True, - ) - assert aioclient_mock.mock_calls[8][2] == {"speed": 4} - - # Service set fan speed to off - - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: 0}, - blocking=True, - ) - assert aioclient_mock.mock_calls[9][2] == {"speed": 0} - - # Service set fan speed to unsupported value - - with pytest.raises(MultipleInvalid): + for percent, speed in (20, 1), (40, 2), (60, 3), (80, 4), (0, 0): + aioclient_mock.mock_calls.clear() await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: "bad value"}, + {ATTR_ENTITY_ID: "fan.ceiling_fan", ATTR_PERCENTAGE: percent}, blocking=True, ) + assert aioclient_mock.mock_calls[0][2] == {"speed": speed} - # Events with an unsupported speed gets converted to default speed "medium" - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"speed": 3}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + # Events with an unsupported speed does not get converted + await light_ws_data({"state": {"speed": 5}}) assert hass.states.get("fan.ceiling_fan").state == STATE_ON - assert hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] == 75 - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + assert not 
hass.states.get("fan.ceiling_fan").attributes[ATTR_PERCENTAGE] diff --git a/tests/components/deconz/test_gateway.py b/tests/components/deconz/test_gateway.py deleted file mode 100644 index b00a5cc1f05..00000000000 --- a/tests/components/deconz/test_gateway.py +++ /dev/null @@ -1,319 +0,0 @@ -"""Test deCONZ gateway.""" - -from copy import deepcopy -from typing import Any -from unittest.mock import patch - -import pydeconz -from pydeconz.websocket import State -import pytest - -from homeassistant.components import ssdp -from homeassistant.components.alarm_control_panel import ( - DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, -) -from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN -from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN -from homeassistant.components.cover import DOMAIN as COVER_DOMAIN -from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect -from homeassistant.components.deconz.hub import DeconzHub, get_deconz_api -from homeassistant.components.fan import DOMAIN as FAN_DOMAIN -from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN -from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN -from homeassistant.components.ssdp import ( - ATTR_UPNP_MANUFACTURER_URL, - ATTR_UPNP_SERIAL, - ATTR_UPNP_UDN, -) -from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER -from homeassistant.const import ( - CONF_API_KEY, - CONF_HOST, - CONF_PORT, - CONTENT_TYPE_JSON, - STATE_OFF, - STATE_UNAVAILABLE, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.typing import UNDEFINED, UndefinedType - -from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker - -API_KEY = "1234567890ABCDEF" -BRIDGEID = "01234E56789A" -HOST = "1.2.3.4" -PORT = 80 - -DEFAULT_URL = f"http://{HOST}:{PORT}/api/{API_KEY}" - -ENTRY_CONFIG = {CONF_API_KEY: API_KEY, CONF_HOST: HOST, CONF_PORT: PORT} - -ENTRY_OPTIONS = {} - -DECONZ_CONFIG = { - "bridgeid": BRIDGEID, - "ipaddress": HOST, - "mac": "00:11:22:33:44:55", - "modelid": "deCONZ", - "name": "deCONZ mock gateway", - "sw_version": "2.05.69", - "uuid": "1234", - "websocketport": 1234, -} - -DECONZ_WEB_REQUEST = { - "config": DECONZ_CONFIG, - "groups": {}, - "lights": {}, - "sensors": {}, -} - - -def mock_deconz_request(aioclient_mock, config, data): - """Mock a deCONZ get request.""" - host = config[CONF_HOST] - port = config[CONF_PORT] - api_key = config[CONF_API_KEY] - - aioclient_mock.get( - f"http://{host}:{port}/api/{api_key}", - json=deepcopy(data), - headers={"content-type": CONTENT_TYPE_JSON}, - ) - - -def mock_deconz_put_request(aioclient_mock, config, path): - """Mock a deCONZ put request.""" - host = config[CONF_HOST] - port = config[CONF_PORT] - api_key = config[CONF_API_KEY] - - aioclient_mock.put( - 
f"http://{host}:{port}/api/{api_key}{path}", - json={}, - headers={"content-type": CONTENT_TYPE_JSON}, - ) - - -async def setup_deconz_integration( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker | None = None, - *, - options: dict[str, Any] | UndefinedType = UNDEFINED, - entry_id="1", - unique_id=BRIDGEID, - source=SOURCE_USER, -): - """Create the deCONZ gateway.""" - config_entry = MockConfigEntry( - domain=DECONZ_DOMAIN, - source=source, - data=deepcopy(ENTRY_CONFIG), - options=deepcopy(ENTRY_OPTIONS if options is UNDEFINED else options), - entry_id=entry_id, - unique_id=unique_id, - ) - config_entry.add_to_hass(hass) - - if aioclient_mock: - mock_deconz_request(aioclient_mock, ENTRY_CONFIG, DECONZ_WEB_REQUEST) - - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - -async def test_gateway_setup( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Successful setup.""" - # Patching async_forward_entry_setup* is not advisable, and should be refactored - # in the future. - with patch( - "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", - return_value=True, - ) as forward_entry_setup: - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - assert gateway.bridgeid == BRIDGEID - assert gateway.master is True - assert gateway.config.allow_clip_sensor is False - assert gateway.config.allow_deconz_groups is True - assert gateway.config.allow_new_devices is True - - assert len(gateway.deconz_ids) == 0 - assert len(hass.states.async_all()) == 0 - - assert forward_entry_setup.mock_calls[0][1] == ( - config_entry, - [ - ALARM_CONTROL_PANEL_DOMAIN, - BINARY_SENSOR_DOMAIN, - BUTTON_DOMAIN, - CLIMATE_DOMAIN, - COVER_DOMAIN, - FAN_DOMAIN, - LIGHT_DOMAIN, - LOCK_DOMAIN, - NUMBER_DOMAIN, - SCENE_DOMAIN, - SELECT_DOMAIN, - SENSOR_DOMAIN, - SIREN_DOMAIN, - SWITCH_DOMAIN, - ], - ) - - gateway_entry = device_registry.async_get_device( - identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} - ) - - assert gateway_entry.configuration_url == f"http://{HOST}:{PORT}" - assert gateway_entry.entry_type is dr.DeviceEntryType.SERVICE - - -async def test_gateway_device_configuration_url_when_addon( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Successful setup.""" - # Patching async_forward_entry_setup* is not advisable, and should be refactored - # in the future. 
- with patch( - "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups", - return_value=True, - ): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, source=SOURCE_HASSIO - ) - gateway = DeconzHub.get_hub(hass, config_entry) - - gateway_entry = device_registry.async_get_device( - identifiers={(DECONZ_DOMAIN, gateway.bridgeid)} - ) - - assert ( - gateway_entry.configuration_url == "homeassistant://hassio/ingress/core_deconz" - ) - - -async def test_connection_status_signalling( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - data = { - "sensors": { - "1": { - "name": "presence", - "type": "ZHAPresence", - "state": {"presence": False}, - "config": {"on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - await mock_deconz_websocket(state=State.RETRYING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE - - await mock_deconz_websocket(state=State.RUNNING) - await hass.async_block_till_done() - - assert hass.states.get("binary_sensor.presence").state == STATE_OFF - - -async def test_update_address( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - assert gateway.api.host == "1.2.3.4" - - with patch( - "homeassistant.components.deconz.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=ssdp.SsdpServiceInfo( - ssdp_st="mock_st", - ssdp_usn="mock_usn", - ssdp_location="http://2.3.4.5:80/", - upnp={ - ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, - ATTR_UPNP_SERIAL: BRIDGEID, - ATTR_UPNP_UDN: "uuid:456DEF", - }, - ), - context={"source": SOURCE_SSDP}, - ) - await hass.async_block_till_done() - - assert gateway.api.host == "2.3.4.5" - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_reset_after_successful_setup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Make sure that connection status triggers a dispatcher send.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - gateway = DeconzHub.get_hub(hass, config_entry) - - result = await gateway.async_reset() - await hass.async_block_till_done() - - assert result is True - - -async def test_get_deconz_api(hass: HomeAssistant) -> None: - """Successful call.""" - config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) - with patch("pydeconz.DeconzSession.refresh_state", return_value=True): - assert await get_deconz_api(hass, config_entry) - - -@pytest.mark.parametrize( - ("side_effect", "raised_exception"), - [ - (TimeoutError, CannotConnect), - (pydeconz.RequestError, CannotConnect), - (pydeconz.ResponseError, CannotConnect), - (pydeconz.Unauthorized, AuthenticationRequired), - ], -) -async def test_get_deconz_api_fails( - hass: HomeAssistant, side_effect, raised_exception -) -> None: - """Failed call.""" - config_entry = MockConfigEntry(domain=DECONZ_DOMAIN, data=ENTRY_CONFIG) - with ( - patch( - "pydeconz.DeconzSession.refresh_state", - 
side_effect=side_effect, - ), - pytest.raises(raised_exception), - ): - assert await get_deconz_api(hass, config_entry) diff --git a/tests/components/deconz/test_hub.py b/tests/components/deconz/test_hub.py new file mode 100644 index 00000000000..43c51179337 --- /dev/null +++ b/tests/components/deconz/test_hub.py @@ -0,0 +1,100 @@ +"""Test deCONZ gateway.""" + +from unittest.mock import patch + +from pydeconz.websocket import State +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components import ssdp +from homeassistant.components.deconz.config_flow import DECONZ_MANUFACTURERURL +from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN +from homeassistant.components.ssdp import ( + ATTR_UPNP_MANUFACTURER_URL, + ATTR_UPNP_SERIAL, + ATTR_UPNP_UDN, +) +from homeassistant.config_entries import SOURCE_SSDP +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import BRIDGE_ID + +from tests.common import MockConfigEntry + + +async def test_device_registry_entry( + config_entry_setup: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Successful setup.""" + device_entry = device_registry.async_get_device( + identifiers={(DECONZ_DOMAIN, config_entry_setup.unique_id)} + ) + assert device_entry == snapshot + + +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "presence", + "type": "ZHAPresence", + "state": {"presence": False}, + "config": {"on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_connection_status_signalling( + hass: HomeAssistant, mock_websocket_state +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + await mock_websocket_state(State.RETRYING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_UNAVAILABLE + + await mock_websocket_state(State.RUNNING) + await hass.async_block_till_done() + + assert hass.states.get("binary_sensor.presence").state == STATE_OFF + + +async def test_update_address( + hass: HomeAssistant, config_entry_setup: MockConfigEntry +) -> None: + """Make sure that connection status triggers a dispatcher send.""" + assert config_entry_setup.data["host"] == "1.2.3.4" + + with ( + patch( + "homeassistant.components.deconz.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch("pydeconz.gateway.WSClient") as ws_mock, + ): + await hass.config_entries.flow.async_init( + DECONZ_DOMAIN, + data=ssdp.SsdpServiceInfo( + ssdp_st="mock_st", + ssdp_usn="mock_usn", + ssdp_location="http://2.3.4.5:80/", + upnp={ + ATTR_UPNP_MANUFACTURER_URL: DECONZ_MANUFACTURERURL, + ATTR_UPNP_SERIAL: BRIDGE_ID, + ATTR_UPNP_UDN: "uuid:456DEF", + }, + ), + context={"source": SOURCE_SSDP}, + ) + await hass.async_block_till_done() + + assert ws_mock.call_args[0][1] == "2.3.4.5" + assert config_entry_setup.data["host"] == "2.3.4.5" + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/deconz/test_init.py b/tests/components/deconz/test_init.py index d08bd039184..390d8b9b353 100644 --- a/tests/components/deconz/test_init.py +++ b/tests/components/deconz/test_init.py @@ -3,64 +3,59 @@ import asyncio from unittest.mock import patch -from homeassistant.components.deconz import ( - 
DeconzHub, - async_setup_entry, - async_unload_entry, +import pydeconz +import pytest + +from homeassistant.components.deconz.const import ( + CONF_MASTER_GATEWAY, + DOMAIN as DECONZ_DOMAIN, ) -from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN -from homeassistant.components.deconz.errors import AuthenticationRequired, CannotConnect +from homeassistant.components.deconz.errors import AuthenticationRequired +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration +from .conftest import ConfigEntryFactoryType -from tests.test_util.aiohttp import AiohttpClientMocker - -ENTRY1_HOST = "1.2.3.4" -ENTRY1_PORT = 80 -ENTRY1_API_KEY = "1234567890ABCDEF" -ENTRY1_BRIDGEID = "12345ABC" -ENTRY1_UUID = "456DEF" - -ENTRY2_HOST = "2.3.4.5" -ENTRY2_PORT = 80 -ENTRY2_API_KEY = "1234567890ABCDEF" -ENTRY2_BRIDGEID = "23456DEF" -ENTRY2_UUID = "789ACE" +from tests.common import MockConfigEntry -async def setup_entry(hass, entry): - """Test that setup entry works.""" - with ( - patch.object(DeconzHub, "async_setup", return_value=True), - patch.object(DeconzHub, "async_update_device_registry", return_value=True), - ): - assert await async_setup_entry(hass, entry) is True +async def test_setup_entry(config_entry_setup: MockConfigEntry) -> None: + """Test successful setup of entry.""" + assert config_entry_setup.state is ConfigEntryState.LOADED + assert config_entry_setup.options[CONF_MASTER_GATEWAY] is True -async def test_setup_entry_successful( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +@pytest.mark.parametrize( + ("side_effect", "state"), + [ + # Failed authentication trigger a reauthentication flow + (pydeconz.Unauthorized, ConfigEntryState.SETUP_ERROR), + # Connection fails + (TimeoutError, ConfigEntryState.SETUP_RETRY), + (pydeconz.RequestError, ConfigEntryState.SETUP_RETRY), + (pydeconz.ResponseError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_get_deconz_api_fails( + hass: HomeAssistant, + config_entry: MockConfigEntry, + side_effect: Exception, + state: ConfigEntryState, ) -> None: - """Test setup entry is successful.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert hass.data[DECONZ_DOMAIN] - assert config_entry.entry_id in hass.data[DECONZ_DOMAIN] - assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master - - -async def test_setup_entry_fails_config_entry_not_ready(hass: HomeAssistant) -> None: - """Failed authentication trigger a reauthentication flow.""" + """Failed setup.""" + config_entry.add_to_hass(hass) with patch( - "homeassistant.components.deconz.get_deconz_api", - side_effect=CannotConnect, + "homeassistant.components.deconz.hub.api.DeconzSession.refresh_state", + side_effect=side_effect, ): - await setup_deconz_integration(hass) - - assert hass.data[DECONZ_DOMAIN] == {} + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is state -async def test_setup_entry_fails_trigger_reauth_flow(hass: HomeAssistant) -> None: +async def test_setup_entry_fails_trigger_reauth_flow( + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType +) -> None: """Failed authentication trigger a reauthentication flow.""" with ( patch( @@ -69,89 +64,83 @@ async def test_setup_entry_fails_trigger_reauth_flow(hass: HomeAssistant) -> Non ), patch.object(hass.config_entries.flow, "async_init") as mock_flow_init, ): - await 
setup_deconz_integration(hass) + config_entry = await config_entry_factory() mock_flow_init.assert_called_once() - - assert hass.data[DECONZ_DOMAIN] == {} + assert config_entry.state is ConfigEntryState.SETUP_ERROR async def test_setup_entry_multiple_gateways( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Test setup entry is successful with multiple gateways.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 - assert hass.data[DECONZ_DOMAIN][config_entry.entry_id].master - assert not hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED + assert config_entry.options[CONF_MASTER_GATEWAY] is True + assert config_entry2.options[CONF_MASTER_GATEWAY] is False async def test_unload_entry( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test being able to unload an entry.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert hass.data[DECONZ_DOMAIN] - - assert await async_unload_entry(hass, config_entry) - assert not hass.data[DECONZ_DOMAIN] + assert config_entry_setup.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(config_entry_setup.entry_id) + assert config_entry_setup.state is ConfigEntryState.NOT_LOADED async def test_unload_entry_multiple_gateways( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Test being able to unload an entry and master gateway gets moved.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED - assert await async_unload_entry(hass, config_entry) - - assert len(hass.data[DECONZ_DOMAIN]) == 1 - assert hass.data[DECONZ_DOMAIN][config_entry2.entry_id].master + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert config_entry2.options[CONF_MASTER_GATEWAY] is True async def test_unload_entry_multiple_gateways_parallel( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, config_entry_factory: 
ConfigEntryFactoryType ) -> None: """Test race condition when unloading multiple config entries in parallel.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - aioclient_mock.clear_requests() + config_entry = await config_entry_factory() - data = {"config": {"bridgeid": "01234E56789B"}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry2 = await setup_deconz_integration( - hass, - aioclient_mock, - entry_id="2", - unique_id="01234E56789B", - ) + entry2 = MockConfigEntry( + domain=DECONZ_DOMAIN, + entry_id="2", + unique_id="01234E56789B", + data=config_entry.data | {"host": "2.3.4.5"}, + ) + config_entry2 = await config_entry_factory(entry2) - assert len(hass.data[DECONZ_DOMAIN]) == 2 + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry2.state is ConfigEntryState.LOADED await asyncio.gather( hass.config_entries.async_unload(config_entry.entry_id), hass.config_entries.async_unload(config_entry2.entry_id), ) - assert len(hass.data[DECONZ_DOMAIN]) == 0 + assert config_entry.state is ConfigEntryState.NOT_LOADED + assert config_entry2.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/deconz/test_light.py b/tests/components/deconz/test_light.py index d964361df57..441cb01be63 100644 --- a/tests/components/deconz/test_light.py +++ b/tests/components/deconz/test_light.py @@ -1,22 +1,20 @@ """deCONZ light platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.deconz.const import ATTR_ON, CONF_ALLOW_DECONZ_GROUPS -from homeassistant.components.deconz.light import DECONZ_GROUP +from homeassistant.components.deconz.const import CONF_ALLOW_DECONZ_GROUPS from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, ATTR_EFFECT, - ATTR_EFFECT_LIST, ATTR_FLASH, ATTR_HS_COLOR, - ATTR_MAX_MIREDS, - ATTR_MIN_MIREDS, - ATTR_RGB_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, ATTR_XY_COLOR, @@ -34,29 +32,19 @@ from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_lights_or_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no lights or groups entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - @pytest.mark.parametrize( - ("input", "expected"), + "light_payload", [ ( # RGB light in color temp color mode { @@ -85,28 +73,7 @@ async def test_no_lights_or_groups( "swversion": "5.127.1.26420", "type": "Extended color light", "uniqueid": "00:17:88:01:01:23:45:67-00", - }, - { - "entity_id": "light.hue_go", - "state": STATE_ON, - "attributes": { - ATTR_BRIGHTNESS: 254, - ATTR_COLOR_TEMP: 375, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | 
LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - DECONZ_GROUP: False, - }, - }, + } ), ( # RGB light in XY color mode { @@ -135,30 +102,7 @@ async def test_no_lights_or_groups( "swversion": "1.65.9_hB3217DF4", "type": "Extended color light", "uniqueid": "00:17:88:01:01:23:45:67-01", - }, - { - "entity_id": "light.hue_ensis", - "state": STATE_ON, - "attributes": { - ATTR_MIN_MIREDS: 140, - ATTR_MAX_MIREDS: 650, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.XY, - ATTR_BRIGHTNESS: 254, - ATTR_HS_COLOR: (29.691, 38.039), - ATTR_RGB_COLOR: (255, 206, 158), - ATTR_XY_COLOR: (0.427, 0.373), - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, + } ), ( # RGB light with only HS color mode { @@ -181,41 +125,7 @@ async def test_no_lights_or_groups( "swversion": None, "type": "Color dimmable light", "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - }, - { - "entity_id": "light.lidl_xmas_light", - "state": STATE_ON, - "attributes": { - ATTR_EFFECT_LIST: [ - "carnival", - "collide", - "fading", - "fireworks", - "flag", - "glow", - "rainbow", - "snake", - "snow", - "sparkles", - "steady", - "strobe", - "twinkle", - "updown", - "vintage", - "waves", - ], - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.HS], - ATTR_COLOR_MODE: ColorMode.HS, - ATTR_BRIGHTNESS: 25, - ATTR_HS_COLOR: (294.938, 55.294), - ATTR_RGB_COLOR: (243, 113, 255), - ATTR_XY_COLOR: (0.357, 0.188), - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, + } ), ( # Tunable white light in CT color mode { @@ -240,22 +150,7 @@ async def test_no_lights_or_groups( "swversion": "1.46.13_r26312", "type": "Color temperature light", "uniqueid": "00:17:88:01:01:23:45:67-02", - }, - { - "entity_id": "light.hue_white_ambiance", - "state": STATE_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 454, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 254, - ATTR_COLOR_TEMP: 396, - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH, - }, - }, + } ), ( # Dimmable light { @@ -270,19 +165,7 @@ async def test_no_lights_or_groups( "swversion": "1.55.8_r28815", "type": "Dimmable light", "uniqueid": "00:17:88:01:01:23:45:67-03", - }, - { - "entity_id": "light.hue_filament", - "state": STATE_ON, - "attributes": { - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.BRIGHTNESS], - ATTR_COLOR_MODE: ColorMode.BRIGHTNESS, - ATTR_BRIGHTNESS: 254, - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH, - }, - }, + } ), ( # On/Off light { @@ -297,17 +180,7 @@ async def test_no_lights_or_groups( "swversion": "2.0", "type": "Simple light", "uniqueid": "00:15:8d:00:01:23:45:67-01", - }, - { - "entity_id": "light.simple_light", - "state": STATE_ON, - "attributes": { - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.ONOFF], - ATTR_COLOR_MODE: ColorMode.ONOFF, - DECONZ_GROUP: False, - ATTR_SUPPORTED_FEATURES: 0, - }, - }, + } ), ( # Gradient light { @@ -406,98 +279,63 @@ async def test_no_lights_or_groups( "swversion": "1.104.2", "type": "Extended color light", "uniqueid": "00:17:88:01:0b:0c:0d:0e-0f", - }, - { - "entity_id": "light.gradient_light", - "state": STATE_ON, - "attributes": { - 
ATTR_SUPPORTED_COLOR_MODES: [ - ColorMode.COLOR_TEMP, - ColorMode.HS, - ColorMode.XY, - ], - ATTR_COLOR_MODE: ColorMode.XY, - }, - }, + } ), ], ) async def test_lights( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, ) -> None: """Test that different light entities are created with expected values.""" - data = {"lights": {"0": input}} - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 1 - - light = hass.states.get(expected["entity_id"]) - assert light.state == expected["state"] - for attribute, expected_value in expected["attributes"].items(): - assert light.attributes[attribute] == expected_value - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) +@pytest.mark.parametrize( + "light_payload", + [ + { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + "lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": True, + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_light_state_change( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + light_ws_data: WebsocketDataType, ) -> None: """Verify light can change state on websocket event.""" - data = { - "lights": { - "0": { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - "ct": 375, - "effect": "none", - "hue": 8348, - "on": True, - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert hass.states.get("light.hue_go").state == STATE_ON - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "0", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": False}}) assert hass.states.get("light.hue_go").state == STATE_OFF @@ -639,44 +477,45 @@ async def test_light_state_change( ], ) async def test_light_service_calls( - hass: HomeAssistant, aioclient_mock: 
AiohttpClientMocker, input, expected + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_factory: ConfigEntryFactoryType, + light_payload: dict[str, Any], + mock_put_request: Callable[[str, str], AiohttpClientMocker], + input: dict[str, Any], + expected: dict[str, Any], ) -> None: """Verify light can change state on websocket event.""" - data = { - "lights": { - "0": { - "colorcapabilities": 31, - "ctmax": 500, - "ctmin": 153, - "etag": "055485a82553e654f156d41c9301b7cf", - "hascolor": True, - "lastannounced": None, - "lastseen": "2021-06-10T20:25Z", - "manufacturername": "Philips", - "modelid": "LLC020", - "name": "Hue Go", - "state": { - "alert": "none", - "bri": 254, - "colormode": "ct", - "ct": 375, - "effect": "none", - "hue": 8348, - "on": input["light_on"], - "reachable": True, - "sat": 147, - "xy": [0.462, 0.4111], - }, - "swversion": "5.127.1.26420", - "type": "Extended color light", - "uniqueid": "00:17:88:01:01:23:45:67-00", - } - } + light_payload[0] = { + "colorcapabilities": 31, + "ctmax": 500, + "ctmin": 153, + "etag": "055485a82553e654f156d41c9301b7cf", + "hascolor": True, + "lastannounced": None, + "lastseen": "2021-06-10T20:25Z", + "manufacturername": "Philips", + "modelid": "LLC020", + "name": "Hue Go", + "state": { + "alert": "none", + "bri": 254, + "colormode": "ct", + "ct": 375, + "effect": "none", + "hue": 8348, + "on": input["light_on"], + "reachable": True, + "sat": 147, + "xy": [0.462, 0.4111], + }, + "swversion": "5.127.1.26420", + "type": "Extended color light", + "uniqueid": "00:17:88:01:01:23:45:67-00", } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -690,41 +529,41 @@ async def test_light_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called +@pytest.mark.parametrize( + "light_payload", + [ + { + "colorcapabilities": 0, + "ctmax": 65535, + "ctmin": 0, + "etag": "9dd510cd474791481f189d2a68a3c7f1", + "hascolor": True, + "lastannounced": "2020-12-17T17:44:38Z", + "lastseen": "2021-01-11T18:36Z", + "manufacturername": "IKEA of Sweden", + "modelid": "TRADFRI bulb E27 WS opal 1000lm", + "name": "IKEA light", + "state": { + "alert": "none", + "bri": 156, + "colormode": "ct", + "ct": 250, + "on": True, + "reachable": True, + }, + "swversion": "2.0.022", + "type": "Color temperature light", + "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_ikea_default_transition_time( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Verify that service calls to IKEA lights always extend with transition tinme 0 if absent.""" - data = { - "lights": { - "0": { - "colorcapabilities": 0, - "ctmax": 65535, - "ctmin": 0, - "etag": "9dd510cd474791481f189d2a68a3c7f1", - "hascolor": True, - "lastannounced": "2020-12-17T17:44:38Z", - "lastseen": "2021-01-11T18:36Z", - "manufacturername": "IKEA of Sweden", - "modelid": "TRADFRI bulb E27 WS opal 1000lm", - "name": "IKEA light", - "state": { - "alert": "none", - "bri": 156, - "colormode": "ct", - "ct": 250, - "on": True, - "reachable": True, - }, - "swversion": "2.0.022", - "type": "Color temperature light", - "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", 
- }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -758,40 +597,39 @@ async def test_ikea_default_transition_time( } +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "87a89542bf9b9d0aa8134919056844f8", + "hascolor": True, + "lastannounced": None, + "lastseen": "2020-12-05T22:57Z", + "manufacturername": "_TZE200_s8gkrkxk", + "modelid": "TS0601", + "name": "LIDL xmas light", + "state": { + "bri": 25, + "colormode": "hs", + "effect": "none", + "hue": 53691, + "on": True, + "reachable": True, + "sat": 141, + }, + "swversion": None, + "type": "Color dimmable light", + "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lidl_christmas_light( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that lights or groups entities are created.""" - data = { - "lights": { - "0": { - "etag": "87a89542bf9b9d0aa8134919056844f8", - "hascolor": True, - "lastannounced": None, - "lastseen": "2020-12-05T22:57Z", - "manufacturername": "_TZE200_s8gkrkxk", - "modelid": "TS0601", - "name": "LIDL xmas light", - "state": { - "bri": 25, - "colormode": "hs", - "effect": "none", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - }, - "swversion": None, - "type": "Color dimmable light", - "uniqueid": "58:8e:81:ff:fe:db:7b:be-01", - } - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/0/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( LIGHT_DOMAIN, @@ -803,135 +641,37 @@ async def test_lidl_christmas_light( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == {"on": True, "hue": 3640, "sat": 76} - assert hass.states.get("light.lidl_xmas_light") -async def test_configuration_tool( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Verify that configuration tool is not created.""" - data = { - "lights": { - "0": { - "etag": "26839cb118f5bf7ba1f2108256644010", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-11-22T11:27Z", - "manufacturername": "dresden elektronik", - "modelid": "ConBee II", - "name": "Configuration tool 1", - "state": {"reachable": True}, - "swversion": "0x264a0700", - "type": "Configuration tool", - "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", - } +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "26839cb118f5bf7ba1f2108256644010", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-11-22T11:27Z", + "manufacturername": "dresden elektronik", + "modelid": "ConBee II", + "name": "Configuration tool 1", + "state": {"reachable": True}, + "swversion": "0x264a0700", + "type": "Configuration tool", + "uniqueid": "00:21:2e:ff:ff:05:a7:a3-01", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_configuration_tool(hass: HomeAssistant) -> None: + """Verify that configuration tool is not created.""" assert len(hass.states.async_all()) == 0 @pytest.mark.parametrize( - 
("input", "expected"), + "light_payload", [ - ( - { - "lights": ["1", "2", "3"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 255, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: 44, - }, - }, - ), - ( - { - "lights": ["3", "1", "2"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.COLOR_TEMP, - ATTR_BRIGHTNESS: 50, - ATTR_EFFECT_LIST: [EFFECT_COLORLOOP], - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, - ), - ( - { - "lights": ["2", "3", "1"], - }, - { - "entity_id": "light.group", - "state": ATTR_ON, - "attributes": { - ATTR_MIN_MIREDS: 153, - ATTR_MAX_MIREDS: 500, - ATTR_SUPPORTED_COLOR_MODES: [ColorMode.COLOR_TEMP, ColorMode.XY], - ATTR_COLOR_MODE: ColorMode.XY, - ATTR_HS_COLOR: (52.0, 100.0), - ATTR_RGB_COLOR: (255, 221, 0), - ATTR_XY_COLOR: (0.5, 0.5), - "all_on": False, - DECONZ_GROUP: True, - ATTR_SUPPORTED_FEATURES: LightEntityFeature.TRANSITION - | LightEntityFeature.FLASH - | LightEntityFeature.EFFECT, - }, - }, - ), - ], -) -async def test_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected -) -> None: - """Test that different group entities are created with expected values.""" - data = { - "groups": { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": True}, - "action": { - "alert": "none", - "bri": 127, - "colormode": "hs", - "ct": 0, - "effect": "none", - "hue": 0, - "on": True, - "sat": 127, - "scene": None, - "xy": [0, 0], - }, - "scenes": [], - "lights": input["lights"], - }, - }, - "lights": { + { "1": { "name": "RGB light", "state": { @@ -964,29 +704,95 @@ async def test_groups( "state": {"bri": 255, "on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:02-00", }, + } + ], +) +@pytest.mark.parametrize( + "input", + [ + ({"lights": ["1", "2", "3"]}), + ({"lights": ["3", "1", "2"]}), + ({"lights": ["2", "3", "1"]}), + ], +) +async def test_groups( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + group_payload: dict[str, Any], + input: dict[str, list[str]], + snapshot: SnapshotAssertion, +) -> None: + """Test that different group entities are created with expected values.""" + group_payload |= { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": { + "alert": "none", + "bri": 127, + "colormode": "hs", + "ct": 0, + "effect": "none", + "hue": 0, + "on": True, + "sat": 127, + "scene": None, + "xy": [0, 0], + }, + "scenes": [], + "lights": input["lights"], }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 4 - - group = hass.states.get(expected["entity_id"]) - assert group.state == expected["state"] - for attribute, expected_value in expected["attributes"].items(): - assert group.attributes[attribute] == expected_value - - await 
hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.LIGHT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) +@pytest.mark.parametrize( + "light_payload", + [ + { + "1": { + "name": "RGB light", + "state": { + "bri": 255, + "colormode": "xy", + "effect": "colorloop", + "hue": 53691, + "on": True, + "reachable": True, + "sat": 141, + "xy": (0.5, 0.5), + }, + "type": "Extended color light", + "uniqueid": "00:00:00:00:00:00:00:00-00", + }, + "2": { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": { + "on": True, + "colormode": "ct", + "ct": 2500, + "reachable": True, + }, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + }, + "3": { + "name": "Dimmable light", + "type": "Dimmable light", + "state": {"bri": 254, "on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + } + ], +) @pytest.mark.parametrize( ("input", "expected"), [ @@ -1045,62 +851,28 @@ async def test_groups( ], ) async def test_group_service_calls( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, input, expected + hass: HomeAssistant, + config_entry_factory: ConfigEntryFactoryType, + group_payload: dict[str, Any], + mock_put_request: Callable[[str, str], AiohttpClientMocker], + input: dict[str, Any], + expected: dict[str, Any], ) -> None: """Verify expected group web request from different service calls.""" - data = { - "groups": { - "0": { - "id": "Light group id", - "name": "Group", - "type": "LightGroup", - "state": {"all_on": False, "any_on": input["group_on"]}, - "action": {}, - "scenes": [], - "lights": input["lights"], - }, - }, - "lights": { - "1": { - "name": "RGB light", - "state": { - "bri": 255, - "colormode": "xy", - "effect": "colorloop", - "hue": 53691, - "on": True, - "reachable": True, - "sat": 141, - "xy": (0.5, 0.5), - }, - "type": "Extended color light", - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - "2": { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": { - "on": True, - "colormode": "ct", - "ct": 2500, - "reachable": True, - }, - "type": "Tunable white light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - "3": { - "name": "Dimmable light", - "type": "Dimmable light", - "state": {"bri": 254, "on": True, "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, + group_payload |= { + "0": { + "id": "Light group id", + "name": "Group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": input["group_on"]}, + "action": {}, + "scenes": [], + "lights": input["lights"], }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() - mock_deconz_put_request(aioclient_mock, config_entry.data, "/groups/0/action") + aioclient_mock = mock_put_request("/groups/0/action") await hass.services.async_call( LIGHT_DOMAIN, @@ -1114,12 +886,10 @@ async def test_group_service_calls( assert len(aioclient_mock.mock_calls) == 1 # not called -async def test_empty_group( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Verify that a group without a list of lights is not 
created.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "0": { "id": "Empty group id", "name": "Empty group", @@ -1129,21 +899,20 @@ async def test_empty_group( "scenes": [], "lights": [], }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_empty_group(hass: HomeAssistant) -> None: + """Verify that a group without a list of lights is not created.""" assert len(hass.states.async_all()) == 0 assert not hass.states.get("light.empty_group") -async def test_disable_light_groups( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test disallowing light groups work.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Light group id", "name": "Light group", @@ -1151,7 +920,7 @@ async def test_disable_light_groups( "state": {"all_on": False, "any_on": True}, "action": {}, "scenes": [], - "lights": ["1"], + "lights": ["0"], }, "2": { "id": "Empty group id", @@ -1162,32 +931,35 @@ async def test_disable_light_groups( "scenes": [], "lights": [], }, - }, - "lights": { - "1": { - "ctmax": 454, - "ctmin": 155, - "name": "Tunable white light", - "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, - "type": "Tunable white light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_ALLOW_DECONZ_GROUPS: False}, - ) - + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { + "ctmax": 454, + "ctmin": 155, + "name": "Tunable white light", + "state": {"on": True, "colormode": "ct", "ct": 2500, "reachable": True}, + "type": "Tunable white light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_DECONZ_GROUPS: False}]) +async def test_disable_light_groups( + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, +) -> None: + """Test disallowing light groups work.""" assert len(hass.states.async_all()) == 1 assert hass.states.get("light.tunable_white_light") assert not hass.states.get("light.light_group") assert not hass.states.get("light.empty_group") hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_DECONZ_GROUPS: True} + config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: True} ) await hass.async_block_till_done() @@ -1195,7 +967,7 @@ async def test_disable_light_groups( assert hass.states.get("light.light_group") hass.config_entries.async_update_entry( - config_entry, options={CONF_ALLOW_DECONZ_GROUPS: False} + config_entry_setup, options={CONF_ALLOW_DECONZ_GROUPS: False} ) await hass.async_block_till_done() @@ -1203,16 +975,10 @@ async def test_disable_light_groups( assert not hass.states.get("light.light_group") -async def test_non_color_light_reports_color( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Verify hs_color does not crash when a group gets updated with a bad color value. - - After calling a scene color temp light of certain manufacturers - report color temp in color space. 
- """ - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "0": { "action": { "alert": "none", @@ -1234,8 +1000,13 @@ async def test_non_color_light_reports_color( "state": {"all_on": False, "any_on": True}, "type": "LightGroup", } - }, - "lights": { + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { "0": { "ctmax": 500, "ctmin": 153, @@ -1285,11 +1056,19 @@ async def test_non_color_light_reports_color( "type": "Color temperature light", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_non_color_light_reports_color( + hass: HomeAssistant, + light_ws_data: WebsocketDataType, +) -> None: + """Verify hs_color does not crash when a group gets updated with a bad color value. + After calling a scene color temp light of certain manufacturers + report color temp in color space. + """ assert len(hass.states.async_all()) == 3 assert hass.states.get("light.group").attributes[ATTR_SUPPORTED_COLOR_MODES] == [ ColorMode.COLOR_TEMP, @@ -1305,9 +1084,7 @@ async def test_non_color_light_reports_color( # Updating a scene will return a faulty color value # for a non-color light causing an exception in hs_color event_changed_light = { - "e": "changed", "id": "1", - "r": "lights", "state": { "alert": None, "bri": 216, @@ -1316,24 +1093,19 @@ async def test_non_color_light_reports_color( "on": True, "reachable": True, }, - "t": "event", "uniqueid": "ec:1b:bd:ff:fe:ee:ed:dd-01", } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data(event_changed_light) group = hass.states.get("light.group") assert group.attributes[ATTR_COLOR_MODE] == ColorMode.XY assert group.attributes[ATTR_HS_COLOR] == (40.571, 41.176) assert group.attributes.get(ATTR_COLOR_TEMP) is None -async def test_verify_group_supported_features( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that group supported features reflect what included lights support.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Group1", "name": "Group", @@ -1343,8 +1115,13 @@ async def test_verify_group_supported_features( "scenes": [], "lights": ["1", "2", "3"], }, - }, - "lights": { + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { "1": { "name": "Dimmable light", "state": {"on": True, "bri": 255, "reachable": True}, @@ -1372,11 +1149,12 @@ async def test_verify_group_supported_features( "type": "Tunable white light", "uniqueid": "00:00:00:00:00:00:00:03-00", }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_verify_group_supported_features(hass: HomeAssistant) -> None: + """Test that group supported features reflect what included lights support.""" assert len(hass.states.async_all()) == 4 group_state = hass.states.get("light.group") @@ -1390,12 +1168,10 @@ async def test_verify_group_supported_features( ) -async def test_verify_group_color_mode_fallback( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that group supported features reflect what included lights support.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "43": { "action": { "alert": "none", @@ -1412,7 +1188,7 
@@ async def test_verify_group_color_mode_fallback( "devicemembership": [], "etag": "4548e982c4cfff942f7af80958abb2a0", "id": "43", - "lights": ["13"], + "lights": ["0"], "name": "Opbergruimte", "scenes": [ { @@ -1443,62 +1219,68 @@ async def test_verify_group_color_mode_fallback( "state": {"all_on": False, "any_on": False}, "type": "LightGroup", }, - }, - "lights": { - "13": { - "capabilities": { - "alerts": [ - "none", - "select", - "lselect", - "blink", - "breathe", - "okay", - "channelchange", - "finish", - "stop", - ], - "bri": {"min_dim_level": 5}, - }, - "config": { - "bri": {"execute_if_off": True, "startup": "previous"}, - "groups": ["43"], - "on": {"startup": "previous"}, - }, - "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", - "hascolor": False, - "lastannounced": None, - "lastseen": "2024-03-02T20:08Z", - "manufacturername": "Signify Netherlands B.V.", - "modelid": "LWA001", - "name": "Opbergruimte Lamp Plafond", - "productid": "Philips-LWA001-1-A19DLv5", - "productname": "Hue white lamp", - "state": { - "alert": "none", - "bri": 76, - "effect": "none", - "on": False, - "reachable": True, - }, - "swconfigid": "87169548", - "swversion": "1.104.2", - "type": "Dimmable light", - "uniqueid": "00:17:88:01:08:11:22:33-01", + } + ], +) +@pytest.mark.parametrize( + "light_payload", + [ + { + "capabilities": { + "alerts": [ + "none", + "select", + "lselect", + "blink", + "breathe", + "okay", + "channelchange", + "finish", + "stop", + ], + "bri": {"min_dim_level": 5}, }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + "config": { + "bri": {"execute_if_off": True, "startup": "previous"}, + "groups": ["43"], + "on": {"startup": "previous"}, + }, + "etag": "ca0ed7763eca37f5e6b24f6d46f8a518", + "hascolor": False, + "lastannounced": None, + "lastseen": "2024-03-02T20:08Z", + "manufacturername": "Signify Netherlands B.V.", + "modelid": "LWA001", + "name": "Opbergruimte Lamp Plafond", + "productid": "Philips-LWA001-1-A19DLv5", + "productname": "Hue white lamp", + "state": { + "alert": "none", + "bri": 76, + "effect": "none", + "on": False, + "reachable": True, + }, + "swconfigid": "87169548", + "swversion": "1.104.2", + "type": "Dimmable light", + "uniqueid": "00:17:88:01:08:11:22:33-01", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_verify_group_color_mode_fallback( + hass: HomeAssistant, + mock_websocket_data: WebsocketDataType, +) -> None: + """Test that group supported features reflect what included lights support.""" group_state = hass.states.get("light.opbergruimte") assert group_state.state == STATE_OFF assert group_state.attributes[ATTR_COLOR_MODE] is None - await mock_deconz_websocket( - data={ - "e": "changed", - "id": "13", + await mock_websocket_data( + { + "id": "0", "r": "lights", "state": { "alert": "none", @@ -1507,17 +1289,14 @@ async def test_verify_group_color_mode_fallback( "on": True, "reachable": True, }, - "t": "event", "uniqueid": "00:17:88:01:08:11:22:33-01", } ) - await mock_deconz_websocket( - data={ - "e": "changed", + await mock_websocket_data( + { "id": "43", "r": "groups", "state": {"all_on": True, "any_on": True}, - "t": "event", } ) group_state = hass.states.get("light.opbergruimte") diff --git a/tests/components/deconz/test_lock.py b/tests/components/deconz/test_lock.py index 03d14802083..28d60e403ef 100644 --- a/tests/components/deconz/test_lock.py +++ b/tests/components/deconz/test_lock.py @@ -1,79 +1,56 @@ """deCONZ lock platform tests.""" -from unittest.mock import 
patch +from collections.abc import Callable + +import pytest from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, SERVICE_LOCK, SERVICE_UNLOCK, ) -from homeassistant.const import ( - ATTR_ENTITY_ID, - STATE_LOCKED, - STATE_UNAVAILABLE, - STATE_UNLOCKED, -) +from homeassistant.const import ATTR_ENTITY_ID, STATE_LOCKED, STATE_UNLOCKED from homeassistant.core import HomeAssistant -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_locks( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no lock entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - +@pytest.mark.parametrize( + "light_payload", + [ + { + "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", + "hascolor": False, + "lastannounced": None, + "lastseen": "2020-08-22T15:29:03Z", + "manufacturername": "Danalock", + "modelid": "V3-BTZB", + "name": "Door lock", + "state": {"alert": "none", "on": False, "reachable": True}, + "swversion": "19042019", + "type": "Door Lock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lock_from_light( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, ) -> None: """Test that all supported lock entities based on lights are created.""" - data = { - "lights": { - "1": { - "etag": "5c2ec06cde4bd654aef3a555fcd8ad12", - "hascolor": False, - "lastannounced": None, - "lastseen": "2020-08-22T15:29:03Z", - "manufacturername": "Danalock", - "modelid": "V3-BTZB", - "name": "Door lock", - "state": {"alert": "none", "on": False, "reachable": True}, - "swversion": "19042019", - "type": "Door Lock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"on": True}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": True}}) assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service lock door @@ -95,68 +72,49 @@ async def test_lock_from_light( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 1 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "battery": 100, + "lock": False, + "on": True, + "reachable": True, + }, + "ep": 11, + "etag": "a43862f76b7fa48b0fbb9107df123b0e", + "lastseen": "2021-03-06T22:25Z", + "manufacturername": "Onesti Products AS", + "modelid": "easyCodeTouch_v1", + 
"name": "Door lock", + "state": { + "lastupdated": "2021-03-06T21:25:45.624", + "lockstate": "unlocked", + }, + "swversion": "20201211", + "type": "ZHADoorLock", + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_lock_from_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + sensor_ws_data: WebsocketDataType, ) -> None: """Test that all supported lock entities based on sensors are created.""" - data = { - "sensors": { - "1": { - "config": { - "battery": 100, - "lock": False, - "on": True, - "reachable": True, - }, - "ep": 11, - "etag": "a43862f76b7fa48b0fbb9107df123b0e", - "lastseen": "2021-03-06T22:25Z", - "manufacturername": "Onesti Products AS", - "modelid": "easyCodeTouch_v1", - "name": "Door lock", - "state": { - "lastupdated": "2021-03-06T21:25:45.624", - "lockstate": "unlocked", - }, - "swversion": "20201211", - "type": "ZHADoorLock", - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 2 assert hass.states.get("lock.door_lock").state == STATE_UNLOCKED - event_changed_light = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "state": {"lockstate": "locked"}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await sensor_ws_data({"state": {"lockstate": "locked"}}) assert hass.states.get("lock.door_lock").state == STATE_LOCKED # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/1/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service lock door @@ -177,14 +135,3 @@ async def test_lock_from_sensor( blocking=True, ) assert aioclient_mock.mock_calls[2][2] == {"lock": False} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_logbook.py b/tests/components/deconz/test_logbook.py index 5940d2e8e34..d23680225f1 100644 --- a/tests/components/deconz/test_logbook.py +++ b/tests/components/deconz/test_logbook.py @@ -1,6 +1,8 @@ """The tests for deCONZ logbook.""" -from unittest.mock import patch +from typing import Any + +import pytest from homeassistant.components.deconz.const import CONF_GESTURE, DOMAIN as DECONZ_DOMAIN from homeassistant.components.deconz.deconz_event import ( @@ -21,51 +23,47 @@ from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from homeassistant.util import slugify -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration - from tests.components.logbook.common import MockRow, mock_humanify -from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "armed": "disarmed", + "enrolled": 0, + "on": True, + "panel": "disarmed", + "pending": [], + "reachable": True, + }, + "ep": 1, + "etag": "3c4008d74035dfaa1f0bb30d24468b12", + "lastseen": "2021-04-02T13:07Z", + "manufacturername": "Universal Electronics Inc", + "modelid": "URC4450BC0-X-R", + 
"name": "Keypad", + "state": { + "action": "armed_away,1111,55", + "lastupdated": "2021-04-02T13:08:18.937", + "lowbattery": False, + "tampered": True, + }, + "type": "ZHAAncillaryControl", + "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_humanifying_deconz_alarm_event( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, + sensor_payload: dict[str, Any], ) -> None: - """Test humanifying deCONZ event.""" - data = { - "sensors": { - "1": { - "config": { - "armed": "disarmed", - "enrolled": 0, - "on": True, - "panel": "disarmed", - "pending": [], - "reachable": True, - }, - "ep": 1, - "etag": "3c4008d74035dfaa1f0bb30d24468b12", - "lastseen": "2021-04-02T13:07Z", - "manufacturername": "Universal Electronics Inc", - "modelid": "URC4450BC0-X-R", - "name": "Keypad", - "state": { - "action": "armed_away,1111,55", - "lastupdated": "2021-04-02T13:08:18.937", - "lowbattery": False, - "tampered": True, - }, - "type": "ZHAAncillaryControl", - "uniqueid": "00:0d:6f:00:13:4f:61:39-01-0501", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - keypad_event_id = slugify(data["sensors"]["1"]["name"]) - keypad_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) + """Test humanifying deCONZ alarm event.""" + keypad_event_id = slugify(sensor_payload["name"]) + keypad_serial = serial_from_unique_id(sensor_payload["uniqueid"]) keypad_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, keypad_serial)} ) @@ -113,14 +111,10 @@ async def test_humanifying_deconz_alarm_event( assert events[1]["message"] == "fired event 'armed_away'" -async def test_humanifying_deconz_event( - hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, -) -> None: - """Test humanifying deCONZ event.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -152,30 +146,35 @@ async def test_humanifying_deconz_event( "uniqueid": "00:00:00:00:00:00:00:04-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - - switch_event_id = slugify(data["sensors"]["1"]["name"]) - switch_serial = serial_from_unique_id(data["sensors"]["1"]["uniqueid"]) + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_humanifying_deconz_event( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + sensor_payload: dict[str, Any], +) -> None: + """Test humanifying deCONZ event.""" + switch_event_id = slugify(sensor_payload["1"]["name"]) + switch_serial = serial_from_unique_id(sensor_payload["1"]["uniqueid"]) switch_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, switch_serial)} ) - hue_remote_event_id = slugify(data["sensors"]["2"]["name"]) - hue_remote_serial = serial_from_unique_id(data["sensors"]["2"]["uniqueid"]) + hue_remote_event_id = slugify(sensor_payload["2"]["name"]) + hue_remote_serial = serial_from_unique_id(sensor_payload["2"]["uniqueid"]) hue_remote_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, hue_remote_serial)} ) - xiaomi_cube_event_id = slugify(data["sensors"]["3"]["name"]) - xiaomi_cube_serial = serial_from_unique_id(data["sensors"]["3"]["uniqueid"]) + xiaomi_cube_event_id = slugify(sensor_payload["3"]["name"]) + xiaomi_cube_serial = serial_from_unique_id(sensor_payload["3"]["uniqueid"]) 
xiaomi_cube_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, xiaomi_cube_serial)} ) - faulty_event_id = slugify(data["sensors"]["4"]["name"]) - faulty_serial = serial_from_unique_id(data["sensors"]["4"]["uniqueid"]) + faulty_event_id = slugify(sensor_payload["4"]["name"]) + faulty_serial = serial_from_unique_id(sensor_payload["4"]["uniqueid"]) faulty_entry = device_registry.async_get_device( identifiers={(DECONZ_DOMAIN, faulty_serial)} ) diff --git a/tests/components/deconz/test_number.py b/tests/components/deconz/test_number.py index 655ae2f42e2..962c2c0a89b 100644 --- a/tests/components/deconz/test_number.py +++ b/tests/components/deconz/test_number.py @@ -1,36 +1,27 @@ """deCONZ number platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_number_entities( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no number entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Presence sensor - delay configuration { @@ -46,19 +37,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "number.presence_sensor_delay", - "unique_id": "00:00:00:00:00:00:00:00-00-delay", - "state": "0", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "min": 0, - "max": 65535, - "step": 1, - "mode": "auto", - "friendly_name": "Presence sensor Delay", - }, "websocket_event": {"config": {"delay": 10}}, "next_state": "10", "supported_service_value": 111, @@ -82,19 +61,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:00-00", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "number.presence_sensor_duration", - "unique_id": "00:00:00:00:00:00:00:00-00-duration", - "state": "0", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "min": 0, - "max": 65535, - "step": 1, - "mode": "auto", - "friendly_name": "Presence sensor Duration", - }, "websocket_event": {"config": {"duration": 10}}, "next_state": "10", "supported_service_value": 111, @@ -107,57 +74,29 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async def test_number_entities( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: 
dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of number entities.""" - - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"0": sensor_data}}): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - entity = hass.states.get(expected["entity_id"]) - assert entity.state == expected["state"] - assert entity.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.NUMBER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Change state - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "0", - } | expected["websocket_event"] - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(expected["websocket_event"]) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/sensors/0/config") + aioclient_mock = mock_put_request("/sensors/0/config") # Service set supported value @@ -197,14 +136,3 @@ async def test_number_entities( }, blocking=True, ) - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_scene.py b/tests/components/deconz/test_scene.py index 2bace605db5..c1240b6881c 100644 --- a/tests/components/deconz/test_scene.py +++ b/tests/components/deconz/test_scene.py @@ -1,97 +1,60 @@ """deCONZ scene platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.scene import DOMAIN as SCENE_DOMAIN, SERVICE_TURN_ON -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_scenes( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that scenes can be loaded without scenes being available.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Scene { - "groups": { - "1": { - "id": "Light group id", - "name": "Light group", - "type": "LightGroup", - "state": {"all_on": False, 
"any_on": True}, - "action": {}, - "scenes": [{"id": "1", "name": "Scene"}], - "lights": [], - } + "1": { + "id": "Light group id", + "name": "Light group", + "type": "LightGroup", + "state": {"all_on": False, "any_on": True}, + "action": {}, + "scenes": [{"id": "1", "name": "Scene"}], + "lights": [], } }, { - "entity_count": 2, - "device_count": 3, "entity_id": "scene.light_group_scene", - "unique_id": "01234E56789A/groups/1/scenes/1", - "entity_category": None, - "attributes": { - "friendly_name": "Light group Scene", - }, "request": "/groups/1/scenes/1/recall", }, ), ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("group_payload", "expected"), TEST_DATA) async def test_scenes( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of scene entities.""" - with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - scene = hass.states.get(expected["entity_id"]) - assert scene.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SCENE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify button press - mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( SCENE_DOMAIN, @@ -101,24 +64,11 @@ async def test_scenes( ) assert aioclient_mock.mock_calls[1][2] == {} - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_only_new_scenes_are_created( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that scenes works.""" - data = { - "groups": { +@pytest.mark.parametrize( + "group_payload", + [ + { "1": { "id": "Light group id", "name": "Light group", @@ -129,20 +79,20 @@ async def test_only_new_scenes_are_created( "lights": [], } } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_only_new_scenes_are_created( + hass: HomeAssistant, + mock_websocket_data: WebsocketDataType, +) -> None: + """Test that scenes works.""" assert len(hass.states.async_all()) == 2 event_changed_group = { - "t": "event", - "e": "changed", "r": "groups", "id": "1", "scenes": [{"id": "1", "name": "Scene"}], } - await 
mock_deconz_websocket(data=event_changed_group) - await hass.async_block_till_done() - + await mock_websocket_data(event_changed_group) assert len(hass.states.async_all()) == 2 diff --git a/tests/components/deconz/test_select.py b/tests/components/deconz/test_select.py index fb8f41293a2..900283d88bb 100644 --- a/tests/components/deconz/test_select.py +++ b/tests/components/deconz/test_select.py @@ -1,5 +1,7 @@ """deCONZ select platform tests.""" +from collections.abc import Callable +from typing import Any from unittest.mock import patch from pydeconz.models.sensor.presence import ( @@ -7,202 +9,135 @@ from pydeconz.models.sensor.presence import ( PresenceConfigTriggerDistance, ) import pytest +from syrupy import SnapshotAssertion from homeassistant.components.select import ( ATTR_OPTION, DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType +from tests.common import snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker - -async def test_no_select_entities( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - TEST_DATA = [ ( # Presence Device Mode { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "select.aqara_fp1_device_mode", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-device_mode", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Device Mode", - "options": ["leftright", "undirected"], - }, "option": PresenceConfigDeviceMode.LEFT_AND_RIGHT.value, - "request": "/sensors/1/config", + "request": "/sensors/0/config", "request_data": {"devicemode": "leftright"}, }, ), ( # Presence Sensitivity { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": 
"13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "select.aqara_fp1_sensitivity", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-sensitivity", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Sensitivity", - "options": ["High", "Medium", "Low"], - }, "option": "Medium", - "request": "/sensors/1/config", + "request": "/sensors/0/config", "request_data": {"sensitivity": 2}, }, ), ( # Presence Trigger Distance { - "sensors": { - "1": { - "config": { - "devicemode": "undirected", - "on": True, - "reachable": True, - "sensitivity": 3, - "triggerdistance": "medium", - }, - "etag": "13ff209f9401b317987d42506dd4cd79", - "lastannounced": None, - "lastseen": "2022-06-28T23:13Z", - "manufacturername": "aqara", - "modelid": "lumi.motion.ac01", - "name": "Aqara FP1", - "state": { - "lastupdated": "2022-06-28T23:13:38.577", - "presence": True, - "presenceevent": "leave", - }, - "swversion": "20210121", - "type": "ZHAPresence", - "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", - } - } + "config": { + "devicemode": "undirected", + "on": True, + "reachable": True, + "sensitivity": 3, + "triggerdistance": "medium", + }, + "etag": "13ff209f9401b317987d42506dd4cd79", + "lastannounced": None, + "lastseen": "2022-06-28T23:13Z", + "manufacturername": "aqara", + "modelid": "lumi.motion.ac01", + "name": "Aqara FP1", + "state": { + "lastupdated": "2022-06-28T23:13:38.577", + "presence": True, + "presenceevent": "leave", + }, + "swversion": "20210121", + "type": "ZHAPresence", + "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406", }, { - "entity_count": 5, - "device_count": 3, "entity_id": "select.aqara_fp1_trigger_distance", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-0406-trigger_distance", - "entity_category": EntityCategory.CONFIG, - "attributes": { - "friendly_name": "Aqara FP1 Trigger Distance", - "options": ["far", "medium", "near"], - }, "option": PresenceConfigTriggerDistance.FAR.value, - "request": "/sensors/1/config", + "request": "/sensors/0/config", "request_data": {"triggerdistance": "far"}, }, ), ] -@pytest.mark.parametrize(("raw_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) async def test_select( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - raw_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of button entities.""" 
- with patch.dict(DECONZ_WEB_REQUEST, raw_data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify state data - - button = hass.states.get(expected["entity_id"]) - assert button.attributes == expected["attributes"] - - # Verify entity registry data - - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry data - - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SELECT]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Verify selecting option - - mock_deconz_put_request(aioclient_mock, config_entry.data, expected["request"]) + aioclient_mock = mock_put_request(expected["request"]) await hass.services.async_call( SELECT_DOMAIN, @@ -214,14 +149,3 @@ async def test_select( blocking=True, ) assert aioclient_mock.mock_calls[1][2] == expected["request_data"] - - # Unload entry - - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index 1e1ca6efe7c..e6ae85df615 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -1,42 +1,23 @@ """deCONZ sensor platform tests.""" from datetime import timedelta +from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.deconz.const import CONF_ALLOW_CLIP_SENSOR -from homeassistant.components.sensor import ( - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - CONCENTRATION_PARTS_PER_BILLION, - CONCENTRATION_PARTS_PER_MILLION, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.util import dt as dt_util -from .test_gateway import DECONZ_WEB_REQUEST, setup_deconz_integration - -from tests.common import async_fire_time_changed -from tests.test_util.aiohttp import AiohttpClientMocker - - -async def test_no_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no sensors in deconz results in no sensor entities.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 +from .conftest import ConfigEntryFactoryType, WebsocketDataType +from tests.common import async_fire_time_changed, snapshot_platform TEST_DATA = [ ( # Air quality sensor @@ -61,17 +42,7 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { - "entity_count": 2, - "device_count": 3, 
"entity_id": "sensor.bosch_air_quality_sensor", - "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality", - "state": "poor", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "friendly_name": "BOSCH Air quality sensor", - }, "websocket_event": {"state": {"airquality": "excellent"}}, "next_state": "excellent", }, @@ -98,19 +69,7 @@ TEST_DATA = [ "uniqueid": "00:12:4b:00:14:4d:00:07-02-fdef", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.bosch_air_quality_sensor_ppb", - "unique_id": "00:12:4b:00:14:4d:00:07-02-fdef-air_quality_ppb", - "state": "809", - "entity_category": None, - "device_class": None, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "BOSCH Air quality sensor PPB", - "state_class": "measurement", - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"airqualityppb": 1000}}, "next_state": "1000", }, @@ -137,20 +96,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_co2", - "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_co2", - "state": "359", - "entity_category": None, - "device_class": SensorDeviceClass.CO2, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 CO2", - "device_class": SensorDeviceClass.CO2, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_MILLION, - }, "websocket_event": {"state": {"airquality_co2_density": 332}}, "next_state": "332", }, @@ -177,20 +123,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_ch2o", - "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_formaldehyde", - "state": "4", - "entity_category": None, - "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 CH2O", - "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"airquality_formaldehyde_density": 5}}, "next_state": "5", }, @@ -217,20 +150,7 @@ TEST_DATA = [ "uniqueid": "00:00:00:00:00:00:00:01-02-0113", }, { - "entity_count": 4, - "device_count": 3, "entity_id": "sensor.airquality_1_pm25", - "unique_id": "00:00:00:00:00:00:00:01-02-0113-air_quality_pm2_5", - "state": "8", - "entity_category": None, - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "AirQuality 1 PM25", - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"pm2_5": 11}}, "next_state": "11", }, @@ -256,21 +176,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:ff:fe:01:23:45-01-0001", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.fyrtur_block_out_roller_blind_battery", - "unique_id": "00:0d:6f:ff:fe:01:23:45-01-0001-battery", - "state": "100", - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": SensorDeviceClass.BATTERY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "%", - "device_class": "battery", - 
"friendly_name": "FYRTUR block-out roller blind Battery", - }, "websocket_event": {"state": {"battery": 50}}, "next_state": "50", }, @@ -300,20 +206,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.carbondioxide_35", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-040d-carbon_dioxide", - "state": "370", - "entity_category": None, - "device_class": SensorDeviceClass.CO2, - "state_class": CONCENTRATION_PARTS_PER_BILLION, - "attributes": { - "device_class": "carbon_dioxide", - "friendly_name": "CarbonDioxide 35", - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"measured_value": 500}}, "next_state": "500", }, @@ -335,22 +228,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0702", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.consumption_15", - "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0702-consumption", - "state": "11.342", - "entity_category": None, - "device_class": SensorDeviceClass.ENERGY, - "state_class": SensorStateClass.TOTAL_INCREASING, - "attributes": { - "state_class": "total_increasing", - "on": True, - "power": 123, - "unit_of_measurement": "kWh", - "device_class": "energy", - "friendly_name": "Consumption 15", - }, "websocket_event": {"state": {"consumption": 10000}}, "next_state": "10.0", }, @@ -378,21 +256,7 @@ TEST_DATA = [ }, { "enable_entity": True, - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.daylight", - "unique_id": "01:23:4E:FF:FF:56:78:9A-01-daylight_status", - "old-unique_id": "01:23:4E:FF:FF:56:78:9A-01", - "state": "solar_noon", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "on": True, - "daylight": True, - "icon": "mdi:white-balance-sunny", - "friendly_name": "Daylight", - }, "websocket_event": {"state": {"status": 210}}, "next_state": "dusk", }, @@ -422,20 +286,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.formaldehyde_34", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042b-formaldehyde", - "state": "1", - "entity_category": None, - "device_class": SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "device_class": "volatile_organic_compounds", - "friendly_name": "Formaldehyde 34", - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_PARTS_PER_BILLION, - }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -459,18 +310,7 @@ TEST_DATA = [ "uniqueid": "fsm-state-1520195376277", }, { - "entity_count": 1, - "device_count": 2, "entity_id": "sensor.fsm_state_motion_stair", - "unique_id": "fsm-state-1520195376277-status", - "state": "0", - "entity_category": None, - "device_class": None, - "state_class": None, - "attributes": { - "on": True, - "friendly_name": "FSM_STATE Motion stair", - }, "websocket_event": {"state": {"status": 1}}, "next_state": "1", }, @@ -497,24 +337,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0405", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": "00:15:8d:00:02:45:dc:53-01-0405-humidity", - "state": "35.55", - "entity_category": None, - "device_class": SensorDeviceClass.HUMIDITY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": 
"%", - "device_class": "humidity", - "friendly_name": "Mi temperature 1", - }, - "options": { - "suggested_display_precision": 1, - }, "websocket_event": {"state": {"humidity": 1000}}, "next_state": "10.0", }, @@ -538,20 +361,7 @@ TEST_DATA = [ "uniqueid": "a4:c1:38:fe:86:8f:07:a3-01-0408", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "sensor.soil_sensor", - "unique_id": "a4:c1:38:fe:86:8f:07:a3-01-0408-moisture", - "state": "72.13", - "entity_category": None, - "device_class": SensorDeviceClass.MOISTURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "unit_of_measurement": "%", - "device_class": "moisture", - "friendly_name": "Soil Sensor", - }, "websocket_event": {"state": {"moisture": 6923}}, "next_state": "69.23", }, @@ -586,23 +396,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:03:28:8c:9b-02-0400", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.motion_sensor_4", - "unique_id": "00:17:88:01:03:28:8c:9b-02-0400-light_level", - "state": "5.0", - "entity_category": None, - "device_class": SensorDeviceClass.ILLUMINANCE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "on": True, - "dark": True, - "daylight": False, - "unit_of_measurement": "lx", - "device_class": "illuminance", - "friendly_name": "Motion sensor 4", - "state_class": "measurement", - }, "websocket_event": {"state": {"lightlevel": 1000}}, "next_state": "1.3", }, @@ -638,20 +432,7 @@ TEST_DATA = [ "uniqueid": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.starkvind_airpurifier_pm25", - "unique_id": "xx:xx:xx:xx:xx:xx:xx:xx-01-042a-particulate_matter_pm2_5", - "state": "1", - "entity_category": None, - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "friendly_name": "STARKVIND AirPurifier PM25", - "device_class": SensorDeviceClass.PM25, - "state_class": SensorStateClass.MEASUREMENT, - "unit_of_measurement": CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - }, "websocket_event": {"state": {"measured_value": 2}}, "next_state": "2", }, @@ -677,23 +458,7 @@ TEST_DATA = [ "uniqueid": "00:0d:6f:00:0b:7a:64:29-01-0b04", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.power_16", - "unique_id": "00:0d:6f:00:0b:7a:64:29-01-0b04-power", - "state": "64", - "entity_category": None, - "device_class": SensorDeviceClass.POWER, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "current": 34, - "voltage": 231, - "unit_of_measurement": "W", - "device_class": "power", - "friendly_name": "Power 16", - }, "websocket_event": {"state": {"power": 1000}}, "next_state": "1000", }, @@ -719,21 +484,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0403", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": "00:15:8d:00:02:45:dc:53-01-0403-pressure", - "state": "1010", - "entity_category": None, - "device_class": SensorDeviceClass.PRESSURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "hPa", - "device_class": "pressure", - "friendly_name": "Mi temperature 1", - }, "websocket_event": {"state": {"pressure": 500}}, "next_state": "500", }, @@ -760,24 +511,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:45:dc:53-01-0402", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.mi_temperature_1", - "unique_id": 
"00:15:8d:00:02:45:dc:53-01-0402-temperature", - "state": "21.82", - "entity_category": None, - "device_class": SensorDeviceClass.TEMPERATURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "unit_of_measurement": "°C", - "device_class": "temperature", - "friendly_name": "Mi temperature 1", - }, - "options": { - "suggested_display_precision": 1, - }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "18.0", }, @@ -806,17 +540,7 @@ TEST_DATA = [ "uniqueid": "cc:cc:cc:ff:fe:38:4d:b3-01-000a", }, { - "entity_count": 2, - "device_count": 3, "entity_id": "sensor.etrv_sejour", - "unique_id": "cc:cc:cc:ff:fe:38:4d:b3-01-000a-last_set", - "state": "2020-11-19T08:07:08+00:00", - "entity_category": None, - "device_class": SensorDeviceClass.TIMESTAMP, - "attributes": { - "device_class": "timestamp", - "friendly_name": "eTRV Séjour", - }, "websocket_event": {"state": {"lastset": "2020-12-14T10:12:14Z"}}, "next_state": "2020-12-14T10:12:14+00:00", }, @@ -845,20 +569,7 @@ TEST_DATA = [ "uniqueid": "00:15:8d:00:02:b5:d1:80-01-0500", }, { - "entity_count": 3, - "device_count": 3, "entity_id": "sensor.alarm_10_temperature", - "unique_id": "00:15:8d:00:02:b5:d1:80-01-0500-internal_temperature", - "state": "26.0", - "entity_category": None, - "device_class": SensorDeviceClass.TEMPERATURE, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "unit_of_measurement": "°C", - "device_class": "temperature", - "friendly_name": "Alarm 10 Temperature", - }, "websocket_event": {"state": {"temperature": 1800}}, "next_state": "26.0", }, @@ -886,22 +597,7 @@ TEST_DATA = [ "uniqueid": "00:17:88:01:02:0e:32:a3-02-fc00", }, { - "entity_count": 1, - "device_count": 3, "entity_id": "sensor.dimmer_switch_3_battery", - "unique_id": "00:17:88:01:02:0e:32:a3-02-fc00-battery", - "state": "90", - "entity_category": EntityCategory.DIAGNOSTIC, - "device_class": SensorDeviceClass.BATTERY, - "state_class": SensorStateClass.MEASUREMENT, - "attributes": { - "state_class": "measurement", - "on": True, - "event_id": "dimmer_switch_3", - "unit_of_measurement": "%", - "device_class": "battery", - "friendly_name": "Dimmer switch 3 Battery", - }, "websocket_event": {"config": {"battery": 80}}, "next_state": "80", }, @@ -909,22 +605,19 @@ TEST_DATA = [ ] -@pytest.mark.parametrize(("sensor_data", "expected"), TEST_DATA) +@pytest.mark.parametrize(("sensor_payload", "expected"), TEST_DATA) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) async def test_sensors( hass: HomeAssistant, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, - mock_deconz_websocket, - sensor_data, - expected, + config_entry_factory: ConfigEntryFactoryType, + sensor_ws_data: WebsocketDataType, + expected: dict[str, Any], + snapshot: SnapshotAssertion, ) -> None: """Test successful creation of sensor entities.""" - - with patch.dict(DECONZ_WEB_REQUEST, {"sensors": {"1": sensor_data}}): - config_entry = await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: True} - ) + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() # Enable in entity registry if expected.get("enable_entity"): @@ -939,79 +632,37 @@ async def test_sensors( ) await hass.async_block_till_done() - assert len(hass.states.async_all()) == expected["entity_count"] - - # Verify entity state - 
sensor = hass.states.get(expected["entity_id"]) - assert sensor.state == expected["state"] - assert sensor.attributes.get(ATTR_DEVICE_CLASS) == expected["device_class"] - assert sensor.attributes == expected["attributes"] - - # Verify entity registry - assert ( - entity_registry.async_get(expected["entity_id"]).entity_category - is expected["entity_category"] - ) - ent_reg_entry = entity_registry.async_get(expected["entity_id"]) - assert ent_reg_entry.entity_category is expected["entity_category"] - assert ent_reg_entry.unique_id == expected["unique_id"] - - # Verify device registry - assert ( - len(dr.async_entries_for_config_entry(device_registry, config_entry.entry_id)) - == expected["device_count"] - ) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Change state - event_changed_sensor = {"t": "event", "e": "changed", "r": "sensors", "id": "1"} - event_changed_sensor |= expected["websocket_event"] - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() + await sensor_ws_data(expected["websocket_event"]) assert hass.states.get(expected["entity_id"]).state == expected["next_state"] - # Unload entry - await hass.config_entries.async_unload(config_entry.entry_id) - assert hass.states.get(expected["entity_id"]).state == STATE_UNAVAILABLE - - # Remove entry - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - - -async def test_not_allow_clip_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "CLIP temperature sensor", + "type": "CLIPTemperature", + "state": {"temperature": 2600}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:02-00", + }, + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: False}]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_not_allow_clip_sensor(hass: HomeAssistant) -> None: """Test that CLIP sensors are not allowed.""" - data = { - "sensors": { - "1": { - "name": "CLIP temperature sensor", - "type": "CLIPTemperature", - "state": {"temperature": 2600}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:02-00", - }, - } - } - - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_ALLOW_CLIP_SENSOR: False} - ) - assert len(hass.states.async_all()) == 0 -async def test_allow_clip_sensors( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that CLIP sensors can be allowed.""" - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Light level sensor", "type": "ZHALightLevel", @@ -1039,17 +690,19 @@ async def test_allow_clip_sensors( "uniqueid": "/sensors/3", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration( - hass, - aioclient_mock, - options={CONF_ALLOW_CLIP_SENSOR: True}, - ) - - assert len(hass.states.async_all()) == 4 - assert hass.states.get("sensor.clip_light_level_sensor").state == "999.8" - assert hass.states.get("sensor.clip_flur").state == "0" + ], +) +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_CLIP_SENSOR: True}]) +async def test_allow_clip_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Test that CLIP sensors can be allowed.""" + with 
patch("homeassistant.components.deconz.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) # Disallow clip sensors @@ -1074,15 +727,14 @@ async def test_allow_clip_sensors( assert hass.states.get("sensor.clip_flur").state == "0" +@pytest.mark.usefixtures("config_entry_setup") async def test_add_new_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, ) -> None: """Test that adding a new sensor works.""" event_added_sensor = { - "t": "event", "e": "added", - "r": "sensors", - "id": "1", "sensor": { "id": "Light sensor id", "name": "Light level sensor", @@ -1093,13 +745,9 @@ async def test_add_new_sensor( }, } - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - await mock_deconz_websocket(data=event_added_sensor) - await hass.async_block_till_done() - + await sensor_ws_data(event_added_sensor) assert len(hass.states.async_all()) == 2 assert hass.states.get("sensor.light_level_sensor").state == "999.8" @@ -1115,71 +763,58 @@ BAD_SENSOR_DATA = [ @pytest.mark.parametrize(("sensor_type", "sensor_property"), BAD_SENSOR_DATA) async def test_dont_add_sensor_if_state_is_none( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, - sensor_type, - sensor_property, + config_entry_factory: ConfigEntryFactoryType, + sensor_payload: dict[str, Any], + sensor_type: str, + sensor_property: str, ) -> None: """Test sensor with scaled data is not created if state is None.""" - data = { - "sensors": { - "1": { - "name": "Sensor 1", - "type": sensor_type, - "state": {sensor_property: None}, - "config": {}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - } - } + sensor_payload["0"] = { + "name": "Sensor 1", + "type": sensor_type, + "state": {sensor_property: None}, + "config": {}, + "uniqueid": "00:00:00:00:00:00:00:00-00", } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() assert len(hass.states.async_all()) == 0 -async def test_air_quality_sensor_without_ppb( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test sensor with scaled data is not created if state is None.""" - data = { - "sensors": { - "1": { - "config": { - "on": True, - "reachable": True, - }, - "ep": 2, - "etag": "c2d2e42396f7c78e11e46c66e2ec0200", - "lastseen": "2020-11-20T22:48Z", - "manufacturername": "BOSCH", - "modelid": "AIR", - "name": "BOSCH Air quality sensor", - "state": { - "airquality": "poor", - "lastupdated": "2020-11-20T22:48:00.209", - }, - "swversion": "20200402", - "type": "ZHAAirQuality", - "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", - } +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "config": { + "on": True, + "reachable": True, + }, + "ep": 2, + "etag": "c2d2e42396f7c78e11e46c66e2ec0200", + "lastseen": "2020-11-20T22:48Z", + "manufacturername": "BOSCH", + "modelid": "AIR", + "name": "BOSCH Air quality sensor", + "state": { + "airquality": "poor", + "lastupdated": "2020-11-20T22:48:00.209", + }, + "swversion": "20200402", + "type": "ZHAAirQuality", + "uniqueid": "00:00:00:00:00:00:00:00-02-fdef", } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_air_quality_sensor_without_ppb(hass: HomeAssistant) -> None: + """Test sensor with 
scaled data is not created if state is None.""" assert len(hass.states.async_all()) == 1 -async def test_add_battery_later( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that a battery sensor can be created later on. - - Without an initial battery state a battery sensor - can be created once a value is reported. - """ - data = { - "sensors": { +@pytest.mark.parametrize( + "sensor_payload", + [ + { "1": { "name": "Switch 1", "type": "ZHASwitch", @@ -1195,190 +830,175 @@ async def test_add_battery_later( "uniqueid": "00:00:00:00:00:00:00:00-00-0001", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_add_battery_later( + hass: HomeAssistant, + sensor_ws_data: WebsocketDataType, +) -> None: + """Test that a battery sensor can be created later on. + Without an initial battery state a battery sensor + can be created once a value is reported. + """ assert len(hass.states.async_all()) == 0 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "2", - "config": {"battery": 50}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "2", "config": {"battery": 50}}) assert len(hass.states.async_all()) == 0 - event_changed_sensor = { - "t": "event", - "e": "changed", - "r": "sensors", - "id": "1", - "config": {"battery": 50}, - } - await mock_deconz_websocket(data=event_changed_sensor) - await hass.async_block_till_done() - + await sensor_ws_data({"id": "1", "config": {"battery": 50}}) assert len(hass.states.async_all()) == 1 - assert hass.states.get("sensor.switch_1_battery").state == "50" @pytest.mark.parametrize("model_id", ["0x8030", "0x8031", "0x8034", "0x8035"]) async def test_special_danfoss_battery_creation( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, model_id + hass: HomeAssistant, + config_entry_factory: ConfigEntryFactoryType, + sensor_payload: dict[str, Any], + model_id: str, ) -> None: """Test the special Danfoss battery creation works. Normally there should only be one battery sensor per device from deCONZ. With specific Danfoss devices each endpoint can report its own battery state. 
""" - data = { - "sensors": { - "1": { - "config": { - "battery": 70, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 1, - "etag": "982d9acc38bee5b251e24a9be26558e4", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:07.994", - "on": False, - "temperature": 2307, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", + sensor_payload |= { + "1": { + "config": { + "battery": 70, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - "2": { - "config": { - "battery": 86, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 2, - "etag": "62f12749f9f51c950086aff37dd02b61", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:22.399", - "on": False, - "temperature": 2316, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", + "ep": 1, + "etag": "982d9acc38bee5b251e24a9be26558e4", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:07.994", + "on": False, + "temperature": 2307, }, - "3": { - "config": { - "battery": 86, - "heatsetpoint": 2350, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 3, - "etag": "f50061174bb7f18a3d95789bab8b646d", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:25.466", - "on": False, - "temperature": 2337, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-01-0201", + }, + "2": { + "config": { + "battery": 86, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - "4": { - "config": { - "battery": 85, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 4, - "etag": "eea97adf8ce1b971b8b6a3a31793f96b", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": { - "lastupdated": "2021-02-15T12:23:41.939", - "on": False, - "temperature": 2333, - }, - "swversion": "YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", + "ep": 2, + "etag": "62f12749f9f51c950086aff37dd02b61", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:22.399", + "on": False, + "temperature": 2316, }, - "5": { - "config": { - "battery": 83, - "heatsetpoint": 2300, - "offset": 0, - "on": True, - "reachable": True, - "schedule": {}, - "schedule_on": False, - }, - "ep": 5, - "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", - "lastseen": "2021-02-15T12:23Z", - "manufacturername": "Danfoss", - "modelid": model_id, - "name": "0x8030", - "state": {"lastupdated": "none", "on": False, "temperature": 2325}, - "swversion": 
"YYYYMMDD", - "type": "ZHAThermostat", - "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-02-0201", + }, + "3": { + "config": { + "battery": 86, + "heatsetpoint": 2350, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, }, - } + "ep": 3, + "etag": "f50061174bb7f18a3d95789bab8b646d", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:25.466", + "on": False, + "temperature": 2337, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-03-0201", + }, + "4": { + "config": { + "battery": 85, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 4, + "etag": "eea97adf8ce1b971b8b6a3a31793f96b", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": { + "lastupdated": "2021-02-15T12:23:41.939", + "on": False, + "temperature": 2333, + }, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-04-0201", + }, + "5": { + "config": { + "battery": 83, + "heatsetpoint": 2300, + "offset": 0, + "on": True, + "reachable": True, + "schedule": {}, + "schedule_on": False, + }, + "ep": 5, + "etag": "1f7cd1a5d66dc27ac5eb44b8c47362fb", + "lastseen": "2021-02-15T12:23Z", + "manufacturername": "Danfoss", + "modelid": model_id, + "name": "0x8030", + "state": {"lastupdated": "none", "on": False, "temperature": 2325}, + "swversion": "YYYYMMDD", + "type": "ZHAThermostat", + "uniqueid": "58:8e:81:ff:fe:00:11:22-05-0201", + }, } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + + await config_entry_factory() assert len(hass.states.async_all()) == 10 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5 -async def test_unsupported_sensor( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.parametrize( + "sensor_payload", + [{"type": "not supported", "name": "name", "state": {}, "config": {}}], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_unsupported_sensor(hass: HomeAssistant) -> None: """Test that unsupported sensors doesn't break anything.""" - data = { - "sensors": { - "0": {"type": "not supported", "name": "name", "state": {}, "config": {}} - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_services.py b/tests/components/deconz/test_services.py index de061fc4e8c..9a30564385c 100644 --- a/tests/components/deconz/test_services.py +++ b/tests/components/deconz/test_services.py @@ -1,6 +1,7 @@ """deCONZ service tests.""" -from unittest.mock import patch +from collections.abc import Callable +from typing import Any import pytest import voluptuous as vol @@ -23,31 +24,25 @@ from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .test_gateway import ( - BRIDGEID, - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - mock_deconz_request, - setup_deconz_integration, -) +from .test_hub import BRIDGE_ID -from tests.common import async_capture_events +from tests.common import 
MockConfigEntry, async_capture_events from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_FIELD: "/lights/2", - CONF_BRIDGE_ID: BRIDGEID, + CONF_BRIDGE_ID: BRIDGE_ID, SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/2") + aioclient_mock = mock_put_request("/lights/2") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -55,29 +50,28 @@ async def test_configure_service_with_field( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Test", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_entity( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - data = { - "lights": { - "1": { - "name": "Test", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_ENTITY: "light.test", SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1") + aioclient_mock = mock_put_request("/lights/0") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -85,30 +79,29 @@ async def test_configure_service_with_entity( assert aioclient_mock.mock_calls[1][2] == {"on": True, "attr1": 10, "attr2": 20} +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Test", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_entity_and_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that service invokes pydeconz with the correct path and data.""" - data = { - "lights": { - "1": { - "name": "Test", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - data = { SERVICE_ENTITY: "light.test", SERVICE_FIELD: "/state", SERVICE_DATA: {"on": True, "attr1": 10, "attr2": 20}, } - - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") await hass.services.async_call( DECONZ_DOMAIN, SERVICE_CONFIGURE_DEVICE, service_data=data, blocking=True @@ -116,11 +109,11 @@ async def test_configure_service_with_entity_and_field( assert aioclient_mock.mock_calls[1][2] == {"on": True, 
"attr1": 10, "attr2": 20} +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_bridgeid( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service fails on a bad bridge id.""" - await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -137,12 +130,9 @@ async def test_configure_service_with_faulty_bridgeid( assert len(aioclient_mock.mock_calls) == 0 -async def test_configure_service_with_faulty_field( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +@pytest.mark.usefixtures("config_entry_setup") +async def test_configure_service_with_faulty_field(hass: HomeAssistant) -> None: """Test that service fails on a bad field.""" - await setup_deconz_integration(hass, aioclient_mock) - data = {SERVICE_FIELD: "light/2", SERVICE_DATA: {}} with pytest.raises(vol.Invalid): @@ -151,11 +141,11 @@ async def test_configure_service_with_faulty_field( ) +@pytest.mark.usefixtures("config_entry_setup") async def test_configure_service_with_faulty_entity( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service on a non existing entity.""" - await setup_deconz_integration(hass, aioclient_mock) aioclient_mock.clear_requests() data = { @@ -171,13 +161,12 @@ async def test_configure_service_with_faulty_entity( assert len(aioclient_mock.mock_calls) == 0 +@pytest.mark.parametrize("config_entry_options", [{CONF_MASTER_GATEWAY: False}]) +@pytest.mark.usefixtures("config_entry_setup") async def test_calling_service_with_no_master_gateway_fails( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test that service call fails when no master gateway exist.""" - await setup_deconz_integration( - hass, aioclient_mock, options={CONF_MASTER_GATEWAY: False} - ) aioclient_mock.clear_requests() data = { @@ -193,18 +182,19 @@ async def test_calling_service_with_no_master_gateway_fails( assert len(aioclient_mock.mock_calls) == 0 +@pytest.mark.usefixtures("config_entry_setup") async def test_service_refresh_devices( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + deconz_payload: dict[str, Any], + mock_requests: Callable[[], None], ) -> None: """Test that service can refresh devices.""" - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 aioclient_mock.clear_requests() - data = { - "config": {}, + deconz_payload |= { "groups": { "1": { "id": "Group 1 id", @@ -234,43 +224,43 @@ async def test_service_refresh_devices( } }, } - - mock_deconz_request(aioclient_mock, config_entry.data, data) + mock_requests() await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} ) await hass.async_block_till_done() assert len(hass.states.async_all()) == 5 +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Switch 1", + "type": "ZHASwitch", + "state": {"buttonevent": 1000}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_service_refresh_devices_trigger_no_state_update( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + deconz_payload: dict[str, Any], + mock_requests, ) -> None: """Verify that 
gateway.ignore_state_updates are honored.""" - data = { - "sensors": { - "1": { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 1 captured_events = async_capture_events(hass, CONF_DECONZ_EVENT) aioclient_mock.clear_requests() - data = { - "config": {}, + deconz_payload |= { "groups": { "1": { "id": "Group 1 id", @@ -291,7 +281,7 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, "sensors": { - "1": { + "0": { "name": "Switch 1", "type": "ZHASwitch", "state": {"buttonevent": 1000}, @@ -300,11 +290,10 @@ async def test_service_refresh_devices_trigger_no_state_update( } }, } - - mock_deconz_request(aioclient_mock, config_entry.data, data) + mock_requests() await hass.services.async_call( - DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGEID} + DECONZ_DOMAIN, SERVICE_DEVICE_REFRESH, service_data={CONF_BRIDGE_ID: BRIDGE_ID} ) await hass.async_block_till_done() @@ -312,37 +301,38 @@ async def test_service_refresh_devices_trigger_no_state_update( assert len(captured_events) == 0 +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Light 0 name", + "state": {"reachable": True}, + "type": "Light", + "uniqueid": "00:00:00:00:00:00:00:01-00", + } + ], +) +@pytest.mark.parametrize( + "sensor_payload", + [ + { + "name": "Switch 1", + "type": "ZHASwitch", + "state": {"buttonevent": 1000, "gesture": 1}, + "config": {"battery": 100}, + "uniqueid": "00:00:00:00:00:00:00:03-00", + } + ], +) async def test_remove_orphaned_entries_service( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, ) -> None: """Test service works and also don't remove more than expected.""" - data = { - "lights": { - "1": { - "name": "Light 1 name", - "state": {"reachable": True}, - "type": "Light", - "uniqueid": "00:00:00:00:00:00:00:01-00", - } - }, - "sensors": { - "1": { - "name": "Switch 1", - "type": "ZHASwitch", - "state": {"buttonevent": 1000, "gesture": 1}, - "config": {"battery": 100}, - "uniqueid": "00:00:00:00:00:00:00:03-00", - }, - }, - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - device = device_registry.async_get_or_create( - config_entry_id=config_entry.entry_id, + config_entry_id=config_entry_setup.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "123")}, ) @@ -351,7 +341,7 @@ async def test_remove_orphaned_entries_service( [ entry for entry in device_registry.devices.values() - if config_entry.entry_id in entry.config_entries + if config_entry_setup.entry_id in entry.config_entries ] ) == 5 # Host, gateway, light, switch and orphan @@ -362,19 +352,23 @@ async def test_remove_orphaned_entries_service( DECONZ_DOMAIN, "12345", suggested_object_id="Orphaned sensor", - config_entry=config_entry, + config_entry=config_entry_setup, device_id=device.id, ) assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 3 # Light, switch battery and orphan ) await hass.services.async_call( DECONZ_DOMAIN, SERVICE_REMOVE_ORPHANED_ENTRIES, - service_data={CONF_BRIDGE_ID: BRIDGEID}, + 
service_data={CONF_BRIDGE_ID: BRIDGE_ID}, ) await hass.async_block_till_done() @@ -383,13 +377,17 @@ async def test_remove_orphaned_entries_service( [ entry for entry in device_registry.devices.values() - if config_entry.entry_id in entry.config_entries + if config_entry_setup.entry_id in entry.config_entries ] ) == 4 # Host, gateway, light and switch ) assert ( - len(er.async_entries_for_config_entry(entity_registry, config_entry.entry_id)) + len( + er.async_entries_for_config_entry( + entity_registry, config_entry_setup.entry_id + ) + ) == 2 # Light and switch battery ) diff --git a/tests/components/deconz/test_siren.py b/tests/components/deconz/test_siren.py index 62ed1b732b8..5c80feef38c 100644 --- a/tests/components/deconz/test_siren.py +++ b/tests/components/deconz/test_siren.py @@ -1,6 +1,8 @@ """deCONZ switch platform tests.""" -from unittest.mock import patch +from collections.abc import Callable + +import pytest from homeassistant.components.siren import ATTR_DURATION, DOMAIN as SIREN_DOMAIN from homeassistant.const import ( @@ -9,61 +11,41 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "Warning device", + "type": "Warning device", + "state": {"alert": "lselect", "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) +@pytest.mark.usefixtures("config_entry_setup") async def test_sirens( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket + hass: HomeAssistant, + light_ws_data: WebsocketDataType, + mock_put_request: Callable[[str, str], AiohttpClientMocker], ) -> None: """Test that siren entities are created.""" - data = { - "lights": { - "1": { - "name": "Warning device", - "type": "Warning device", - "state": {"alert": "lselect", "reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:00-00", - }, - "2": { - "name": "Unsupported siren", - "type": "Not a siren", - "state": {"reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:01-00", - }, - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - - assert len(hass.states.async_all()) == 2 + assert len(hass.states.async_all()) == 1 assert hass.states.get("siren.warning_device").state == STATE_ON - assert not hass.states.get("siren.unsupported_siren") - - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"alert": None}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() + await light_ws_data({"state": {"alert": None}}) assert hass.states.get("siren.warning_device").state == STATE_OFF # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = mock_put_request("/lights/0/state") # Service turn on siren @@ -94,14 +76,3 @@ async def test_sirens( blocking=True, ) assert aioclient_mock.mock_calls[3][2] == {"alert": "lselect", "ontime": 100} - - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 2 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await 
hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 diff --git a/tests/components/deconz/test_switch.py b/tests/components/deconz/test_switch.py index 9ef2382a2e2..ed82b0c2ac3 100644 --- a/tests/components/deconz/test_switch.py +++ b/tests/components/deconz/test_switch.py @@ -1,6 +1,8 @@ """deCONZ switch platform tests.""" -from unittest.mock import patch +from collections.abc import Callable + +import pytest from homeassistant.components.deconz.const import DOMAIN as DECONZ_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN @@ -9,83 +11,65 @@ from homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .test_gateway import ( - DECONZ_WEB_REQUEST, - mock_deconz_put_request, - setup_deconz_integration, -) +from .conftest import ConfigEntryFactoryType, WebsocketDataType from tests.test_util.aiohttp import AiohttpClientMocker -async def test_no_switches( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test that no switch entities are created.""" - await setup_deconz_integration(hass, aioclient_mock) - assert len(hass.states.async_all()) == 0 - - -async def test_power_plugs( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, mock_deconz_websocket -) -> None: - """Test that all supported switch entities are created.""" - data = { - "lights": { - "1": { +@pytest.mark.parametrize( + "light_payload", + [ + { + "0": { "name": "On off switch", "type": "On/Off plug-in unit", "state": {"on": True, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:00-00", }, - "2": { + "1": { "name": "Smart plug", "type": "Smart plug", "state": {"on": False, "reachable": True}, "uniqueid": "00:00:00:00:00:00:00:01-00", }, - "3": { + "2": { "name": "Unsupported switch", "type": "Not a switch", "state": {"reachable": True}, - "uniqueid": "00:00:00:00:00:00:00:03-00", + "uniqueid": "00:00:00:00:00:00:00:02-00", }, - "4": { + "3": { "name": "On off relay", "state": {"on": True, "reachable": True}, "type": "On/Off light", - "uniqueid": "00:00:00:00:00:00:00:04-00", + "uniqueid": "00:00:00:00:00:00:00:03-00", }, } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - config_entry = await setup_deconz_integration(hass, aioclient_mock) - + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_power_plugs( + hass: HomeAssistant, + mock_put_request: Callable[[str, str], AiohttpClientMocker], + light_ws_data: WebsocketDataType, +) -> None: + """Test that all supported switch entities are created.""" assert len(hass.states.async_all()) == 4 assert hass.states.get("switch.on_off_switch").state == STATE_ON assert hass.states.get("switch.smart_plug").state == STATE_OFF assert hass.states.get("switch.on_off_relay").state == STATE_ON assert hass.states.get("switch.unsupported_switch") is None - event_changed_light = { - "t": "event", - "e": "changed", - "r": "lights", - "id": "1", - "state": {"on": False}, - } - await mock_deconz_websocket(data=event_changed_light) - await hass.async_block_till_done() - + await light_ws_data({"state": {"on": False}}) assert hass.states.get("switch.on_off_switch").state == STATE_OFF # Verify service calls - mock_deconz_put_request(aioclient_mock, config_entry.data, "/lights/1/state") + aioclient_mock = 
mock_put_request("/lights/0/state") # Service turn on power plug @@ -107,44 +91,29 @@ async def test_power_plugs( ) assert aioclient_mock.mock_calls[2][2] == {"on": False} - await hass.config_entries.async_unload(config_entry.entry_id) - - states = hass.states.async_all() - assert len(states) == 4 - for state in states: - assert state.state == STATE_UNAVAILABLE - - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - assert len(hass.states.async_all()) == 0 - +@pytest.mark.parametrize( + "light_payload", + [ + { + "name": "On Off output device", + "type": "On/Off output", + "state": {"on": True, "reachable": True}, + "uniqueid": "00:00:00:00:00:00:00:00-00", + } + ], +) async def test_remove_legacy_on_off_output_as_light( hass: HomeAssistant, - aioclient_mock: AiohttpClientMocker, entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Test that switch platform cleans up legacy light entities.""" - unique_id = "00:00:00:00:00:00:00:00-00" - - switch_light_entity = entity_registry.async_get_or_create( - LIGHT_DOMAIN, DECONZ_DOMAIN, unique_id + assert entity_registry.async_get_or_create( + LIGHT_DOMAIN, DECONZ_DOMAIN, "00:00:00:00:00:00:00:00-00" ) - assert switch_light_entity - - data = { - "lights": { - "1": { - "name": "On Off output device", - "type": "On/Off output", - "state": {"on": True, "reachable": True}, - "uniqueid": unique_id, - }, - } - } - with patch.dict(DECONZ_WEB_REQUEST, data): - await setup_deconz_integration(hass, aioclient_mock) + await config_entry_factory() assert not entity_registry.async_get("light.on_off_output_device") assert entity_registry.async_get("switch.on_off_output_device") diff --git a/tests/components/demo/test_camera.py b/tests/components/demo/test_camera.py index 756609ed094..89dd8e0cdf7 100644 --- a/tests/components/demo/test_camera.py +++ b/tests/components/demo/test_camera.py @@ -1,9 +1,9 @@ """The tests for local file camera component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.camera import ( DOMAIN as CAMERA_DOMAIN, diff --git a/tests/components/demo/test_climate.py b/tests/components/demo/test_climate.py index 682b85f0845..383e00834b8 100644 --- a/tests/components/demo/test_climate.py +++ b/tests/components/demo/test_climate.py @@ -1,9 +1,9 @@ """The tests for the demo climate component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components.climate import ( diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py index 7ee408d3bfc..009d2ca2f49 100644 --- a/tests/components/demo/test_cover.py +++ b/tests/components/demo/test_cover.py @@ -1,10 +1,10 @@ """The tests for the Demo cover platform.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, diff --git a/tests/components/demo/test_init.py b/tests/components/demo/test_init.py index 498a03600cb..0af15455949 100644 --- a/tests/components/demo/test_init.py +++ b/tests/components/demo/test_init.py @@ -1,10 +1,10 @@ """The tests for the Demo component.""" +from collections.abc import Generator import json from unittest.mock import patch import pytest -from 
typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/demo/test_light.py b/tests/components/demo/test_light.py index 5c2c478b0bf..e3b1efc7eec 100644 --- a/tests/components/demo/test_light.py +++ b/tests/components/demo/test_light.py @@ -1,9 +1,9 @@ """The tests for the demo light component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.components.light import ( diff --git a/tests/components/demo/test_notify.py b/tests/components/demo/test_notify.py index 4ebbfbdac04..98b3de8448a 100644 --- a/tests/components/demo/test_notify.py +++ b/tests/components/demo/test_notify.py @@ -1,9 +1,9 @@ """The tests for the notify demo platform.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components import notify from homeassistant.components.demo import DOMAIN @@ -81,6 +81,6 @@ async def test_calling_notify_from_script_loaded_from_yaml( await hass.services.async_call("script", "test") await hass.async_block_till_done() assert len(events) == 1 - assert { + assert events[0].data == { "message": "Test 123 4", - } == events[0].data + } diff --git a/tests/components/demo/test_number.py b/tests/components/demo/test_number.py index 37763b6e289..79885fa8581 100644 --- a/tests/components/demo/test_number.py +++ b/tests/components/demo/test_number.py @@ -1,9 +1,9 @@ """The tests for the demo number component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components.number import ( diff --git a/tests/components/demo/test_switch.py b/tests/components/demo/test_switch.py index 8b78171fd17..57384526dc0 100644 --- a/tests/components/demo/test_switch.py +++ b/tests/components/demo/test_switch.py @@ -1,9 +1,9 @@ """The tests for the demo switch component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.demo import DOMAIN from homeassistant.components.switch import ( diff --git a/tests/components/demo/test_text.py b/tests/components/demo/test_text.py index 3588330c75c..4ca172e5143 100644 --- a/tests/components/demo/test_text.py +++ b/tests/components/demo/test_text.py @@ -1,9 +1,9 @@ """The tests for the demo text component.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.text import ( ATTR_MAX, diff --git a/tests/components/device_automation/test_init.py b/tests/components/device_automation/test_init.py index b270d2ddd7a..750817f3c41 100644 --- a/tests/components/device_automation/test_init.py +++ b/tests/components/device_automation/test_init.py @@ -23,13 +23,7 @@ from homeassistant.loader import IntegrationNotFound from homeassistant.requirements import RequirementsNotFound from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - MockModule, - async_mock_service, - mock_integration, - mock_platform, -) +from tests.common import MockConfigEntry, MockModule, mock_integration, mock_platform from tests.typing import WebSocketGenerator @@ -1384,15 +1378,9 @@ async def 
test_automation_with_bad_condition( assert expected_error.format(path="['condition'][0]") in caplog.text -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_automation_with_sub_condition( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -1492,29 +1480,29 @@ async def test_automation_with_sub_condition( await hass.async_block_till_done() assert hass.states.get(entity_entry1.entity_id).state == STATE_ON assert hass.states.get(entity_entry2.entity_id).state == STATE_OFF - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "or event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_entry2.entity_id, STATE_ON) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "or event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "or event - test_event1" hass.states.async_set(entity_entry1.entity_id, STATE_ON) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 4 - assert [calls[2].data["some"], calls[3].data["some"]] == unordered( + assert len(service_calls) == 4 + assert [service_calls[2].data["some"], service_calls[3].data["some"]] == unordered( ["or event - test_event1", "and event - test_event1"] ) diff --git a/tests/components/device_automation/test_toggle_entity.py b/tests/components/device_automation/test_toggle_entity.py index f15730d9525..be4d3bd4c9e 100644 --- a/tests/components/device_automation/test_toggle_entity.py +++ b/tests/components/device_automation/test_toggle_entity.py @@ -11,7 +11,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, async_fire_time_changed, async_mock_service +from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -19,17 +19,11 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing. 
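The hunks above and below drop each test module's local `calls` fixture in favor of a shared `service_calls` fixture. A minimal sketch of what such a shared fixture can look like, assuming it simply wraps `async_mock_service` the way the removed per-module fixtures did (the shared fixture's real location and body are not shown in this diff):

import pytest

from homeassistant.core import HomeAssistant, ServiceCall

from tests.common import async_mock_service


@pytest.fixture
def service_calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Track calls to a mock service, mirroring the removed per-module fixture."""
    return async_mock_service(hass, "test", "automation")

Tests then read the captured calls through the `service_calls` parameter, e.g. `assert len(service_calls) == 1`.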
@@ -121,20 +115,20 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -145,7 +139,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], trigger: str, ) -> None: """Test for triggers firing with delay.""" @@ -193,16 +187,16 @@ async def test_if_fires_on_state_change_with_for( ) await hass.async_block_till_done() assert hass.states.get(entry.entity_id).state == STATE_ON - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index 65afd5743f5..f3821eb5af9 100644 --- a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -77,11 +77,10 @@ async def scanner( ) await hass.async_block_till_done() - return scanner - +@pytest.mark.usefixtures("scanner") async def test_lights_on_when_sun_sets( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights go on when there is someone home and the sun sets.""" test_time = datetime(2017, 4, 5, 1, 2, 3, tzinfo=dt_util.UTC) @@ -136,8 +135,9 @@ async def test_lights_turn_off_when_everyone_leaves(hass: HomeAssistant) -> None ) +@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) @@ -172,8 +172,9 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( ) +@pytest.mark.usefixtures("scanner") async def test_lights_turn_on_when_coming_home_after_sun_set_person( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, scanner + hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun 
set.""" device_1 = f"{DOMAIN}.device_1" diff --git a/tests/components/device_tracker/test_config_entry.py b/tests/components/device_tracker/test_config_entry.py index 45b94012051..5b9ce78e4f5 100644 --- a/tests/components/device_tracker/test_config_entry.py +++ b/tests/components/device_tracker/test_config_entry.py @@ -1,9 +1,9 @@ """Test Device Tracker config entry things.""" +from collections.abc import Generator from typing import Any import pytest -from typing_extensions import Generator from homeassistant.components.device_tracker import ( ATTR_HOST_NAME, diff --git a/tests/components/device_tracker/test_device_condition.py b/tests/components/device_tracker/test_device_condition.py index 6ea4ed7a372..aff020d61a8 100644 --- a/tests/components/device_tracker/test_device_condition.py +++ b/tests/components/device_tracker/test_device_condition.py @@ -12,11 +12,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -24,12 +20,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 +104,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -184,22 +174,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_home - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_home - event - test_event1" hass.states.async_set(entry.entity_id, "school") hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_not_home - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_not_home - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,5 +237,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_home - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_home - event - test_event1" diff --git a/tests/components/device_tracker/test_device_trigger.py b/tests/components/device_tracker/test_device_trigger.py index 
4236e316424..ebff89e1a15 100644 --- a/tests/components/device_tracker/test_device_trigger.py +++ b/tests/components/device_tracker/test_device_trigger.py @@ -17,11 +17,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -36,12 +32,6 @@ HOME_LATITUDE = 32.880837 HOME_LONGITUDE = -117.237561 -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) def setup_zone(hass: HomeAssistant) -> None: """Create test zone.""" @@ -145,7 +135,7 @@ async def test_if_fires_on_zone_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -228,9 +218,9 @@ async def test_if_fires_on_zone_change( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) @@ -241,9 +231,9 @@ async def test_if_fires_on_zone_change( {"latitude": AWAY_LATITUDE, "longitude": AWAY_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"leave - device - {entry.entity_id} - -117.238 - -117.235" ) @@ -252,7 +242,7 @@ async def test_if_fires_on_zone_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for enter and leave triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -311,9 +301,9 @@ async def test_if_fires_on_zone_change_legacy( {"latitude": HOME_LATITUDE, "longitude": HOME_LONGITUDE}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"enter - device - {entry.entity_id} - -117.235 - -117.238" ) diff --git a/tests/components/device_tracker/test_init.py b/tests/components/device_tracker/test_init.py index cedf2a2f0bc..362258b035a 100644 --- a/tests/components/device_tracker/test_init.py +++ b/tests/components/device_tracker/test_init.py @@ -1,5 +1,6 @@ """The tests for the device tracker component.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import logging @@ -8,7 +9,6 @@ from types import ModuleType from unittest.mock import call, patch import pytest -from typing_extensions import Generator from homeassistant.components import device_tracker, zone from homeassistant.components.device_tracker import SourceType, const, legacy diff --git a/tests/components/devolo_home_control/conftest.py b/tests/components/devolo_home_control/conftest.py index 04752da5925..55e072d075c 100644 --- a/tests/components/devolo_home_control/conftest.py +++ 
b/tests/components/devolo_home_control/conftest.py @@ -1,9 +1,9 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/devolo_home_control/mocks.py b/tests/components/devolo_home_control/mocks.py index 02823871e0f..33c0a230e90 100644 --- a/tests/components/devolo_home_control/mocks.py +++ b/tests/components/devolo_home_control/mocks.py @@ -117,6 +117,7 @@ class DeviceMock(Zwave): self.uid = "Test" self.device_model_uid = "Test" self.device_type = "Test" + self.identifier = "MT01234" self.settings_property = {"general_device_settings": SettingsMock()} self.href = "https://www.mydevolo.com" diff --git a/tests/components/devolo_home_control/test_diagnostics.py b/tests/components/devolo_home_control/test_diagnostics.py index f52a9d49017..dfadc4d1c4b 100644 --- a/tests/components/devolo_home_control/test_diagnostics.py +++ b/tests/components/devolo_home_control/test_diagnostics.py @@ -5,6 +5,7 @@ from __future__ import annotations from unittest.mock import patch from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -35,4 +36,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/devolo_home_network/snapshots/test_init.ambr b/tests/components/devolo_home_network/snapshots/test_init.ambr index 8c265400643..619a8ce1121 100644 --- a/tests/components/devolo_home_network/snapshots/test_init.ambr +++ b/tests/components/devolo_home_network/snapshots/test_init.ambr @@ -25,6 +25,7 @@ }), 'manufacturer': 'devolo', 'model': 'dLAN pro 1200+ WiFi ac', + 'model_id': '2730', 'name': 'Mock Title', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/devolo_home_network/test_diagnostics.py b/tests/components/devolo_home_network/test_diagnostics.py index a3580cac954..05d3c594677 100644 --- a/tests/components/devolo_home_network/test_diagnostics.py +++ b/tests/components/devolo_home_network/test_diagnostics.py @@ -4,6 +4,7 @@ from __future__ import annotations import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -28,4 +29,4 @@ async def test_entry_diagnostics( assert entry.state is ConfigEntryState.LOADED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/discovergy/conftest.py b/tests/components/discovergy/conftest.py index 056f763c3e2..4f65099c1b4 100644 --- a/tests/components/discovergy/conftest.py +++ b/tests/components/discovergy/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Discovergy integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from pydiscovergy.models import Reading import pytest -from typing_extensions import Generator from homeassistant.components.discovergy.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/dlink/conftest.py 
b/tests/components/dlink/conftest.py index 4bbf99000a9..c56b93c4d3d 100644 --- a/tests/components/dlink/conftest.py +++ b/tests/components/dlink/conftest.py @@ -1,11 +1,10 @@ """Configure pytest for D-Link tests.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from copy import deepcopy from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import dhcp from homeassistant.components.dlink.const import CONF_USE_LEGACY_PROTOCOL, DOMAIN diff --git a/tests/components/dlna_dmr/conftest.py b/tests/components/dlna_dmr/conftest.py index f470fbabc6f..21cb2bc0daf 100644 --- a/tests/components/dlna_dmr/conftest.py +++ b/tests/components/dlna_dmr/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator from socket import AddressFamily # pylint: disable=no-name-in-module from unittest.mock import Mock, create_autospec, patch, seal @@ -32,7 +32,7 @@ NEW_DEVICE_LOCATION = "http://198.51.100.7" + "/dmr_description.xml" @pytest.fixture -def domain_data_mock(hass: HomeAssistant) -> Iterable[Mock]: +def domain_data_mock(hass: HomeAssistant) -> Mock: """Mock the global data used by this component. This includes network clients and library object factories. Mocking it @@ -114,7 +114,7 @@ def config_entry_mock_no_mac() -> MockConfigEntry: @pytest.fixture -def dmr_device_mock(domain_data_mock: Mock) -> Iterable[Mock]: +def dmr_device_mock(domain_data_mock: Mock) -> Generator[Mock]: """Mock the async_upnp_client DMR device, initially connected.""" with patch( "homeassistant.components.dlna_dmr.media_player.DmrDevice", autospec=True @@ -135,7 +135,7 @@ def dmr_device_mock(domain_data_mock: Mock) -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_scanner_mock() -> Iterable[Mock]: +def ssdp_scanner_mock() -> Generator[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -144,14 +144,14 @@ def ssdp_scanner_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Iterable[Mock]: +def ssdp_server_mock() -> Generator[None]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @pytest.fixture(autouse=True) -def async_get_local_ip_mock() -> Iterable[Mock]: +def async_get_local_ip_mock() -> Generator[Mock]: """Mock the async_get_local_ip utility function to prevent network access.""" with patch( "homeassistant.components.dlna_dmr.media_player.async_get_local_ip", diff --git a/tests/components/dlna_dmr/test_config_flow.py b/tests/components/dlna_dmr/test_config_flow.py index a91cd4744d9..d60a8f17b83 100644 --- a/tests/components/dlna_dmr/test_config_flow.py +++ b/tests/components/dlna_dmr/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator import dataclasses import logging from unittest.mock import Mock, patch @@ -89,7 +89,7 @@ MOCK_DISCOVERY = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_get_mac_address() -> Iterable[Mock]: +def mock_get_mac_address() -> Generator[Mock]: """Mock the get_mac_address function to prevent network access and assist tests.""" with patch( "homeassistant.components.dlna_dmr.config_flow.get_mac_address", autospec=True @@ -99,7 +99,7 @@ def 
mock_get_mac_address() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def mock_setup_entry() -> Iterable[Mock]: +def mock_setup_entry() -> Generator[Mock]: """Mock async_setup_entry.""" with patch( "homeassistant.components.dlna_dmr.async_setup_entry", return_value=True diff --git a/tests/components/dlna_dmr/test_data.py b/tests/components/dlna_dmr/test_data.py index 57652747ffd..e67a559f934 100644 --- a/tests/components/dlna_dmr/test_data.py +++ b/tests/components/dlna_dmr/test_data.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator from unittest.mock import ANY, Mock, patch from async_upnp_client.aiohttp import AiohttpNotifyServer @@ -16,7 +16,7 @@ from homeassistant.core import Event, HomeAssistant @pytest.fixture -def aiohttp_notify_servers_mock() -> Iterable[Mock]: +def aiohttp_notify_servers_mock() -> Generator[Mock]: """Construct mock AiohttpNotifyServer on demand, eliminating network use. This fixture provides a list of the constructed servers. diff --git a/tests/components/dlna_dmr/test_media_player.py b/tests/components/dlna_dmr/test_media_player.py index d202994f988..3d8f9da8ed9 100644 --- a/tests/components/dlna_dmr/test_media_player.py +++ b/tests/components/dlna_dmr/test_media_player.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncIterable, Mapping +from collections.abc import AsyncGenerator, Mapping from dataclasses import dataclass from datetime import timedelta from typing import Any @@ -95,7 +95,7 @@ async def mock_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncIterable[str]: +) -> AsyncGenerator[str]: """Fixture to set up a mock DlnaDmrEntity in a connected state. Yields the entity ID. Cleans up the entity after the test is complete. @@ -145,7 +145,7 @@ async def mock_disconnected_entity_id( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dmr_device_mock: Mock, -) -> AsyncIterable[str]: +) -> AsyncGenerator[str]: """Fixture to set up a mock DlnaDmrEntity in a disconnected state. Yields the entity ID. Cleans up the entity after the test is complete. 
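The dlna_dmr fixture changes above (and the dlna_dms ones below) replace `Iterable[Mock]` / `AsyncIterable[str]` return annotations with `Generator` / `AsyncGenerator` from `collections.abc`, which describe yield-style pytest fixtures more precisely. A minimal sketch of the convention; the fixture names and the `json.loads` patch target are stand-ins for illustration only, not anything from this diff:

from collections.abc import AsyncGenerator, Generator
from unittest.mock import Mock, patch

import pytest


@pytest.fixture
def client_mock() -> Generator[Mock]:
    """Yield a patched callable; the single type argument is the yielded value."""
    # json.loads is only a convenient, real patch target for this sketch.
    with patch("json.loads", autospec=True) as loads_mock:
        yield loads_mock


@pytest.fixture
async def entity_id() -> AsyncGenerator[str]:
    """Async fixture that yields an entity id and cleans up afterwards."""
    yield "media_player.example"

The send and return type parameters of `Generator`/`AsyncGenerator` default to None, so a single type argument is enough for fixtures that only yield.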
diff --git a/tests/components/dlna_dms/conftest.py b/tests/components/dlna_dms/conftest.py index ed05dfa4c76..eb10babf527 100644 --- a/tests/components/dlna_dms/conftest.py +++ b/tests/components/dlna_dms/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import AsyncIterable, Iterable +from collections.abc import AsyncGenerator, Generator from typing import Final, cast from unittest.mock import AsyncMock, MagicMock, Mock, create_autospec, patch, seal @@ -44,7 +44,7 @@ async def setup_media_source(hass: HomeAssistant) -> None: @pytest.fixture -def upnp_factory_mock() -> Iterable[Mock]: +def upnp_factory_mock() -> Generator[Mock]: """Mock the UpnpFactory class to construct DMS-style UPnP devices.""" with patch( "homeassistant.components.dlna_dms.dms.UpnpFactory", @@ -82,7 +82,7 @@ def upnp_factory_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True, scope="module") -def aiohttp_session_requester_mock() -> Iterable[Mock]: +def aiohttp_session_requester_mock() -> Generator[Mock]: """Mock the AiohttpSessionRequester to prevent network use.""" with patch( "homeassistant.components.dlna_dms.dms.AiohttpSessionRequester", autospec=True @@ -109,7 +109,7 @@ def config_entry_mock() -> MockConfigEntry: @pytest.fixture -def dms_device_mock(upnp_factory_mock: Mock) -> Iterable[Mock]: +def dms_device_mock(upnp_factory_mock: Mock) -> Generator[Mock]: """Mock the async_upnp_client DMS device, initially connected.""" with patch( "homeassistant.components.dlna_dms.dms.DmsDevice", autospec=True @@ -130,7 +130,7 @@ def dms_device_mock(upnp_factory_mock: Mock) -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_scanner_mock() -> Iterable[Mock]: +def ssdp_scanner_mock() -> Generator[Mock]: """Mock the SSDP Scanner.""" with patch("homeassistant.components.ssdp.Scanner", autospec=True) as mock_scanner: reg_callback = mock_scanner.return_value.async_register_callback @@ -139,7 +139,7 @@ def ssdp_scanner_mock() -> Iterable[Mock]: @pytest.fixture(autouse=True) -def ssdp_server_mock() -> Iterable[Mock]: +def ssdp_server_mock() -> Generator[None]: """Mock the SSDP Server.""" with patch("homeassistant.components.ssdp.Server", autospec=True): yield @@ -151,7 +151,7 @@ async def device_source_mock( config_entry_mock: MockConfigEntry, ssdp_scanner_mock: Mock, dms_device_mock: Mock, -) -> AsyncIterable[None]: +) -> AsyncGenerator[None]: """Fixture to set up a DmsDeviceSource in a connected state and cleanup at completion.""" config_entry_mock.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry_mock.entry_id) diff --git a/tests/components/dlna_dms/test_config_flow.py b/tests/components/dlna_dms/test_config_flow.py index b61b4a42c49..14da36a0381 100644 --- a/tests/components/dlna_dms/test_config_flow.py +++ b/tests/components/dlna_dms/test_config_flow.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Iterable +from collections.abc import Generator import dataclasses import logging from typing import Final @@ -68,7 +68,7 @@ MOCK_DISCOVERY: Final = ssdp.SsdpServiceInfo( @pytest.fixture(autouse=True) -def mock_setup_entry() -> Iterable[Mock]: +def mock_setup_entry() -> Generator[Mock]: """Avoid setting up the entire integration.""" with patch( "homeassistant.components.dlna_dms.async_setup_entry", diff --git a/tests/components/doorbird/__init__.py b/tests/components/doorbird/__init__.py index 57bf4c04e39..2d517dfcefe 100644 --- a/tests/components/doorbird/__init__.py +++ b/tests/components/doorbird/__init__.py @@ -1 +1,85 @@ 
"""Tests for the DoorBird integration.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock + +import aiohttp +from doorbirdpy import DoorBird, DoorBirdScheduleEntry + +from homeassistant import config_entries +from homeassistant.components.doorbird.const import API_URL +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) + +VALID_CONFIG = { + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "friend", + CONF_PASSWORD: "password", + CONF_NAME: "mydoorbird", +} + + +def _get_aiohttp_client_error(status: int) -> aiohttp.ClientResponseError: + """Return a mock aiohttp client response error.""" + return aiohttp.ClientResponseError( + request_info=Mock(), + history=Mock(), + status=status, + ) + + +def mock_unauthorized_exception() -> aiohttp.ClientResponseError: + """Return a mock unauthorized exception.""" + return _get_aiohttp_client_error(401) + + +def mock_not_found_exception() -> aiohttp.ClientResponseError: + """Return a mock not found exception.""" + return _get_aiohttp_client_error(404) + + +def get_mock_doorbird_api( + info: dict[str, Any] | None = None, + info_side_effect: Exception | None = None, + schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, + favorites: dict[str, dict[str, Any]] | None = None, + favorites_side_effect: Exception | None = None, + change_schedule: tuple[bool, int] | None = None, +) -> DoorBird: + """Return a mock DoorBirdAPI object with return values.""" + doorbirdapi_mock = MagicMock(spec_set=DoorBird) + api_mock_type = type(doorbirdapi_mock) + api_mock_type.info = AsyncMock(side_effect=info_side_effect, return_value=info) + api_mock_type.favorites = AsyncMock( + side_effect=favorites_side_effect, return_value=favorites + ) + api_mock_type.change_favorite = AsyncMock(return_value=True) + api_mock_type.change_schedule = AsyncMock( + return_value=change_schedule or (True, 200) + ) + api_mock_type.schedule = AsyncMock( + return_value=schedule, side_effect=schedule_side_effect + ) + api_mock_type.energize_relay = AsyncMock(return_value=True) + api_mock_type.turn_light_on = AsyncMock(return_value=True) + api_mock_type.delete_favorite = AsyncMock(return_value=True) + api_mock_type.get_image = AsyncMock(return_value=b"image") + api_mock_type.doorbell_state = AsyncMock(side_effect=mock_unauthorized_exception()) + return doorbirdapi_mock + + +async def mock_webhook_call( + config_entry: config_entries.ConfigEntry, + aiohttp_client: aiohttp.ClientSession, + event: str, +) -> None: + """Mock the webhook call.""" + token = config_entry.data.get(CONF_TOKEN, config_entry.entry_id) + response = await aiohttp_client.get(f"{API_URL}/{event}?token={token}") + response.raise_for_status() diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py new file mode 100644 index 00000000000..2e367e4e1d8 --- /dev/null +++ b/tests/components/doorbird/conftest.py @@ -0,0 +1,133 @@ +"""Test configuration for DoorBird tests.""" + +from collections.abc import Callable, Coroutine, Generator +from contextlib import contextmanager +from dataclasses import dataclass +from typing import Any +from unittest.mock import MagicMock, patch + +from doorbirdpy import DoorBird, DoorBirdScheduleEntry +import pytest + +from homeassistant.components.doorbird.const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, +) +from homeassistant.core import HomeAssistant + +from . 
import VALID_CONFIG, get_mock_doorbird_api + +from tests.common import MockConfigEntry, load_json_value_fixture + +type DoorbirdMockerType = Callable[[], Coroutine[Any, Any, MockDoorbirdEntry]] + + +@dataclass +class MockDoorbirdEntry: + """Mock DoorBird config entry.""" + + entry: MockConfigEntry + api: MagicMock + + +@pytest.fixture(scope="session") +def doorbird_info() -> dict[str, Any]: + """Return a loaded DoorBird info fixture.""" + return load_json_value_fixture("info.json", "doorbird")["BHA"]["VERSION"][0] + + +@pytest.fixture(scope="session") +def doorbird_schedule() -> list[DoorBirdScheduleEntry]: + """Return a loaded DoorBird schedule fixture.""" + return DoorBirdScheduleEntry.parse_all( + load_json_value_fixture("schedule.json", "doorbird") + ) + + +@pytest.fixture(scope="session") +def doorbird_schedule_wrong_param() -> list[DoorBirdScheduleEntry]: + """Return a loaded DoorBird schedule fixture with an incorrect param.""" + return DoorBirdScheduleEntry.parse_all( + load_json_value_fixture("schedule_wrong_param.json", "doorbird") + ) + + +@pytest.fixture(scope="session") +def doorbird_favorites() -> dict[str, dict[str, Any]]: + """Return a loaded DoorBird favorites fixture.""" + return load_json_value_fixture("favorites.json", "doorbird") + + +@pytest.fixture +def doorbird_api( + doorbird_info: dict[str, Any], doorbird_schedule: dict[str, Any] +) -> Generator[DoorBird]: + """Mock the DoorBirdAPI.""" + api = get_mock_doorbird_api(info=doorbird_info, schedule=doorbird_schedule) + with patch_doorbird_api_entry_points(api): + yield api + + +@contextmanager +def patch_doorbird_api_entry_points(api: MagicMock) -> Generator[DoorBird]: + """Mock the DoorBirdAPI.""" + with ( + patch( + "homeassistant.components.doorbird.DoorBird", + return_value=api, + ), + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=api, + ), + ): + yield api + + +@pytest.fixture +async def doorbird_mocker( + hass: HomeAssistant, + doorbird_info: dict[str, Any], + doorbird_schedule: dict[str, Any], + doorbird_favorites: dict[str, dict[str, Any]], +) -> DoorbirdMockerType: + """Create a MockDoorbirdEntry.""" + + async def _async_mock( + entry: MockConfigEntry | None = None, + api: DoorBird | None = None, + change_schedule: tuple[bool, int] | None = None, + info: dict[str, Any] | None = None, + info_side_effect: Exception | None = None, + schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, + favorites: dict[str, dict[str, Any]] | None = None, + favorites_side_effect: Exception | None = None, + options: dict[str, Any] | None = None, + ) -> MockDoorbirdEntry: + """Create a MockDoorbirdEntry from defaults or specific values.""" + entry = entry or MockConfigEntry( + domain=DOMAIN, + unique_id="1CCAE3AAAAAA", + data=VALID_CONFIG, + options=options + or {CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT]}, + ) + api = api or get_mock_doorbird_api( + info=info or doorbird_info, + info_side_effect=info_side_effect, + schedule=schedule or doorbird_schedule, + schedule_side_effect=schedule_side_effect, + favorites=favorites or doorbird_favorites, + favorites_side_effect=favorites_side_effect, + change_schedule=change_schedule, + ) + entry.add_to_hass(hass) + with patch_doorbird_api_entry_points(api): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return MockDoorbirdEntry(entry=entry, api=api) + + return _async_mock diff --git a/tests/components/doorbird/fixtures/favorites.json 
b/tests/components/doorbird/fixtures/favorites.json new file mode 100644 index 00000000000..c56f79c0300 --- /dev/null +++ b/tests/components/doorbird/fixtures/favorites.json @@ -0,0 +1,12 @@ +{ + "http": { + "0": { + "title": "Home Assistant (mydoorbird_doorbell)", + "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_doorbell?token=01J2F4B97Y7P1SARXEJ6W07EKD" + }, + "1": { + "title": "Home Assistant (mydoorbird_motion)", + "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD" + } + } +} diff --git a/tests/components/doorbird/fixtures/info.json b/tests/components/doorbird/fixtures/info.json new file mode 100644 index 00000000000..46fb8fbac86 --- /dev/null +++ b/tests/components/doorbird/fixtures/info.json @@ -0,0 +1,23 @@ +{ + "BHA": { + "RETURNCODE": "1", + "VERSION": [ + { + "FIRMWARE": "000125", + "BUILD_NUMBER": "15870439", + "WIFI_MAC_ADDR": "1234ABCD", + "RELAYS": [ + "1", + "2", + "ghchdi@1", + "ghchdi@2", + "ghchdi@3", + "ghdwkh@1", + "ghdwkh@2", + "ghdwkh@3" + ], + "DEVICE-TYPE": "DoorBird D2101V" + } + ] + } +} diff --git a/tests/components/doorbird/fixtures/schedule.json b/tests/components/doorbird/fixtures/schedule.json new file mode 100644 index 00000000000..c300180777c --- /dev/null +++ b/tests/components/doorbird/fixtures/schedule.json @@ -0,0 +1,67 @@ +[ + { + "input": "doorbell", + "param": "1", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "0", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "motion", + "param": "", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "5", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "relay", + "param": "1", + "output": [] + } +] diff --git a/tests/components/doorbird/fixtures/schedule_wrong_param.json b/tests/components/doorbird/fixtures/schedule_wrong_param.json new file mode 100644 index 00000000000..724f19b1774 --- /dev/null +++ b/tests/components/doorbird/fixtures/schedule_wrong_param.json @@ -0,0 +1,67 @@ +[ + { + "input": "doorbell", + "param": "99", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "0", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "motion", + "param": "", + "output": [ + { + "event": "notify", + "param": "", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + }, + { + "event": "http", + "param": "5", + "schedule": { + "weekdays": [ + { + "to": "107999", + "from": "108000" + } + ] + } + } + ] + }, + { + "input": "relay", + "param": "1", + "output": [] + } +] diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py new file mode 100644 index 00000000000..2131e3d6133 --- /dev/null +++ b/tests/components/doorbird/test_button.py @@ -0,0 +1,52 @@ +"""Test DoorBird buttons.""" + +from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from .conftest import DoorbirdMockerType + + +async def 
test_relay_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing a relay button.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "button.mydoorbird_relay_1" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True + ) + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.energize_relay.call_count == 1 + + +async def test_ir_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing the IR button.""" + doorbird_entry = await doorbird_mocker() + ir_entity_id = "button.mydoorbird_ir" + assert hass.states.get(ir_entity_id).state == STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True + ) + assert hass.states.get(ir_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.turn_light_on.call_count == 1 + + +async def test_reset_favorites_button( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test pressing the reset favorites button.""" + doorbird_entry = await doorbird_mocker() + reset_entity_id = "button.mydoorbird_reset_favorites" + assert hass.states.get(reset_entity_id).state == STATE_UNKNOWN + await hass.services.async_call( + DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True + ) + assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN + assert doorbird_entry.api.delete_favorite.call_count == 2 diff --git a/tests/components/doorbird/test_camera.py b/tests/components/doorbird/test_camera.py new file mode 100644 index 00000000000..228a6c81daa --- /dev/null +++ b/tests/components/doorbird/test_camera.py @@ -0,0 +1,46 @@ +"""Test DoorBird cameras.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.camera import ( + STATE_IDLE, + async_get_image, + async_get_stream_source, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from . 
import mock_not_found_exception +from .conftest import DoorbirdMockerType + + +async def test_doorbird_cameras( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the doorbird cameras.""" + doorbird_entry = await doorbird_mocker() + live_camera_entity_id = "camera.mydoorbird_live" + assert hass.states.get(live_camera_entity_id).state == STATE_IDLE + last_motion_camera_entity_id = "camera.mydoorbird_last_motion" + assert hass.states.get(last_motion_camera_entity_id).state == STATE_IDLE + last_ring_camera_entity_id = "camera.mydoorbird_last_ring" + assert hass.states.get(last_ring_camera_entity_id).state == STATE_IDLE + assert await async_get_stream_source(hass, live_camera_entity_id) is not None + api = doorbird_entry.api + api.get_image.side_effect = mock_not_found_exception() + with pytest.raises(HomeAssistantError): + await async_get_image(hass, live_camera_entity_id) + api.get_image.side_effect = TimeoutError() + with pytest.raises(HomeAssistantError): + await async_get_image(hass, live_camera_entity_id) + api.get_image.side_effect = None + assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" + api.get_image.return_value = b"notyet" + # Ensure rate limit works + assert (await async_get_image(hass, live_camera_entity_id)).content == b"image" + + freezer.tick(60) + assert (await async_get_image(hass, live_camera_entity_id)).content == b"notyet" diff --git a/tests/components/doorbird/test_config_flow.py b/tests/components/doorbird/test_config_flow.py index cd4ddccda87..3abdd2b87a3 100644 --- a/tests/components/doorbird/test_config_flow.py +++ b/tests/components/doorbird/test_config_flow.py @@ -1,47 +1,35 @@ """Test the DoorBird config flow.""" from ipaddress import ip_address -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import AsyncMock, Mock, patch +import aiohttp +from doorbirdpy import DoorBird import pytest -import requests from homeassistant import config_entries from homeassistant.components import zeroconf -from homeassistant.components.doorbird.const import CONF_EVENTS, DOMAIN +from homeassistant.components.doorbird.const import ( + CONF_EVENTS, + DEFAULT_DOORBELL_EVENT, + DEFAULT_MOTION_EVENT, + DOMAIN, +) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import ( + VALID_CONFIG, + get_mock_doorbird_api, + mock_not_found_exception, + mock_unauthorized_exception, +) + from tests.common import MockConfigEntry -VALID_CONFIG = { - CONF_HOST: "1.2.3.4", - CONF_USERNAME: "friend", - CONF_PASSWORD: "password", - CONF_NAME: "mydoorbird", -} - -def _get_mock_doorbirdapi_return_values(ready=None, info=None): - doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(return_value=ready) - type(doorbirdapi_mock).info = MagicMock(return_value=info) - type(doorbirdapi_mock).doorbell_state = MagicMock( - side_effect=requests.exceptions.HTTPError(response=Mock(status_code=401)) - ) - return doorbirdapi_mock - - -def _get_mock_doorbirdapi_side_effects(ready=None, info=None): - doorbirdapi_mock = MagicMock() - type(doorbirdapi_mock).ready = MagicMock(side_effect=ready) - type(doorbirdapi_mock).info = MagicMock(side_effect=info) - - return doorbirdapi_mock - - -async def test_user_form(hass: HomeAssistant) -> None: +async def test_user_form(hass: HomeAssistant, doorbird_api: DoorBird) -> None: """Test we get the user form.""" result = await hass.config_entries.flow.async_init( @@ -50,14 +38,7 @@ async def test_user_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) with ( - patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ), patch( "homeassistant.components.doorbird.async_setup", return_value=True ) as mock_setup, @@ -80,6 +61,9 @@ async def test_user_form(hass: HomeAssistant) -> None: "password": "password", "username": "friend", } + assert result2["options"] == { + CONF_EVENTS: [DEFAULT_DOORBELL_EVENT, DEFAULT_MOTION_EVENT] + } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -172,39 +156,30 @@ async def test_form_zeroconf_non_ipv4_ignored(hass: HomeAssistant) -> None: assert result["reason"] == "not_ipv4_address" -async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: +async def test_form_zeroconf_correct_oui( + hass: HomeAssistant, doorbird_api: DoorBird +) -> None: """Test we can setup from zeroconf with the correct OUI source.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) - with patch( - "homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.5"), - ip_addresses=[ip_address("192.168.1.5")], - hostname="mock_hostname", - name="Doorstation - abc123._axis-video._tcp.local.", - port=None, - properties={"macaddress": "1CCAE3DOORBIRD"}, - type="mock_type", - ), - ) - await hass.async_block_till_done() + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("192.168.1.5"), + ip_addresses=[ip_address("192.168.1.5")], + hostname="mock_hostname", + name="Doorstation - abc123._axis-video._tcp.local.", + port=None, + properties={"macaddress": "1CCAE3DOORBIRD"}, + type="mock_type", + ), + ) + await hass.async_block_till_done() assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} with ( - patch( - 
"homeassistant.components.doorbird.config_flow.DoorBird", - return_value=doorbirdapi, - ), patch("homeassistant.components.logbook.async_setup", return_value=True), patch( "homeassistant.components.doorbird.async_setup", return_value=True @@ -234,19 +209,19 @@ async def test_form_zeroconf_correct_oui(hass: HomeAssistant) -> None: @pytest.mark.parametrize( "doorbell_state_side_effect", [ - requests.exceptions.HTTPError(response=Mock(status_code=404)), + aiohttp.ClientResponseError(request_info=Mock(), history=Mock(), status=404), OSError, None, ], ) async def test_form_zeroconf_correct_oui_wrong_device( - hass: HomeAssistant, doorbell_state_side_effect + hass: HomeAssistant, + doorbird_api: DoorBird, + doorbell_state_side_effect: Exception | None, ) -> None: """Test we can setup from zeroconf with the correct OUI source but not a doorstation.""" - doorbirdapi = _get_mock_doorbirdapi_return_values( - ready=[True], info={"WIFI_MAC_ADDR": "macaddr"} - ) - type(doorbirdapi).doorbell_state = MagicMock(side_effect=doorbell_state_side_effect) + doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) + type(doorbirdapi).doorbell_state = AsyncMock(side_effect=doorbell_state_side_effect) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", @@ -276,7 +251,7 @@ async def test_form_user_cannot_connect(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=OSError) + doorbirdapi = get_mock_doorbird_api(info_side_effect=OSError) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -296,8 +271,8 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_error = requests.exceptions.HTTPError(response=Mock(status_code=401)) - doorbirdapi = _get_mock_doorbirdapi_side_effects(ready=mock_error) + mock_error = mock_unauthorized_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) with patch( "homeassistant.components.doorbird.config_flow.DoorBird", return_value=doorbirdapi, @@ -311,6 +286,100 @@ async def test_form_user_invalid_auth(hass: HomeAssistant) -> None: assert result2["errors"] == {"base": "invalid_auth"} +async def test_form_user_doorbird_not_found( + doorbird_api: DoorBird, hass: HomeAssistant +) -> None: + """Test handling unable to connect to the device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + mock_error = mock_not_found_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + + with ( + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], VALID_CONFIG + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "1.2.3.4" + assert result3["data"] == { + "host": "1.2.3.4", + "name": "mydoorbird", + "password": "password", 
+ "username": "friend", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_user_doorbird_unknown_exception( + doorbird_api: DoorBird, hass: HomeAssistant +) -> None: + """Test handling unable an unknown exception.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + doorbirdapi = get_mock_doorbird_api(info_side_effect=ValueError) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "unknown"} + + with ( + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], VALID_CONFIG + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "1.2.3.4" + assert result3["data"] == { + "host": "1.2.3.4", + "name": "mydoorbird", + "password": "password", + "username": "friend", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + async def test_options_flow(hass: HomeAssistant) -> None: """Test config flow options.""" @@ -336,3 +405,67 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY assert config_entry.options == {CONF_EVENTS: ["eventa", "eventc", "eventq"]} + + +async def test_reauth(hass: HomeAssistant) -> None: + """Test reauth flow.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "DoorBird", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + config_entry.add_to_hass(hass) + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) + assert len(flows) == 1 + flow = flows[0] + + mock_error = mock_unauthorized_exception() + doorbirdapi = get_mock_doorbird_api(info_side_effect=mock_error) + with patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + + doorbirdapi = get_mock_doorbird_api(info={"WIFI_MAC_ADDR": "macaddr"}) + with ( + patch( + "homeassistant.components.doorbird.config_flow.DoorBird", + return_value=doorbirdapi, + ), + patch( + "homeassistant.components.doorbird.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.doorbird.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + flow["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_setup.mock_calls) == 1 diff --git 
a/tests/components/doorbird/test_device.py b/tests/components/doorbird/test_device.py new file mode 100644 index 00000000000..cf3beae5e68 --- /dev/null +++ b/tests/components/doorbird/test_device.py @@ -0,0 +1,59 @@ +"""Test DoorBird device.""" + +from copy import deepcopy +from http import HTTPStatus + +from doorbirdpy import DoorBirdScheduleEntry +import pytest + +from homeassistant.components.doorbird.const import CONF_EVENTS +from homeassistant.core import HomeAssistant + +from .conftest import DoorbirdMockerType + + +async def test_no_configured_events( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbird with no events configured.""" + await doorbird_mocker(options={CONF_EVENTS: []}) + assert not hass.states.async_all("event") + + +async def test_change_schedule_success( + doorbird_mocker: DoorbirdMockerType, + doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a doorbird when change_schedule succeeds.""" + schedule_copy = deepcopy(doorbird_schedule_wrong_param) + mock_doorbird = await doorbird_mocker(schedule=schedule_copy) + assert "Unable to update schedule entry mydoorbird" not in caplog.text + assert mock_doorbird.api.change_schedule.call_count == 1 + new_schedule: list[DoorBirdScheduleEntry] = ( + mock_doorbird.api.change_schedule.call_args[0] + ) + # Ensure the attempt to update the schedule to fix the incorrect + # param is made + assert new_schedule[-1].output[-1].param == "1" + + +async def test_change_schedule_fails( + doorbird_mocker: DoorbirdMockerType, + doorbird_schedule_wrong_param: list[DoorBirdScheduleEntry], + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a doorbird when change_schedule fails.""" + schedule_copy = deepcopy(doorbird_schedule_wrong_param) + mock_doorbird = await doorbird_mocker( + schedule=schedule_copy, change_schedule=(False, HTTPStatus.UNAUTHORIZED) + ) + assert "Unable to update schedule entry mydoorbird" in caplog.text + assert mock_doorbird.api.change_schedule.call_count == 1 + new_schedule: list[DoorBirdScheduleEntry] = ( + mock_doorbird.api.change_schedule.call_args[0] + ) + # Ensure the attempt to update the schedule to fix the incorrect + # param is made + assert new_schedule[-1].output[-1].param == "1" diff --git a/tests/components/doorbird/test_event.py b/tests/components/doorbird/test_event.py new file mode 100644 index 00000000000..11e0f3a306d --- /dev/null +++ b/tests/components/doorbird/test_event.py @@ -0,0 +1,37 @@ +"""Test DoorBird events.""" + +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from . 
import mock_webhook_call +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_doorbell_ring_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbell ring event.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "event.mydoorbird_doorbell" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + client = await hass_client() + await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_doorbell") + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN + + +async def test_motion_event( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test a doorbell motion event.""" + doorbird_entry = await doorbird_mocker() + relay_1_entity_id = "event.mydoorbird_motion" + assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN + client = await hass_client() + await mock_webhook_call(doorbird_entry.entry, client, "mydoorbird_motion") + assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN diff --git a/tests/components/doorbird/test_init.py b/tests/components/doorbird/test_init.py new file mode 100644 index 00000000000..31266c4acf0 --- /dev/null +++ b/tests/components/doorbird/test_init.py @@ -0,0 +1,93 @@ +"""Test DoorBird init.""" + +import pytest + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import mock_not_found_exception, mock_unauthorized_exception +from .conftest import DoorbirdMockerType + + +async def test_basic_setup( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup.""" + doorbird_entry = await doorbird_mocker() + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.LOADED + + +async def test_auth_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup with an auth failure.""" + doorbird_entry = await doorbird_mocker( + info_side_effect=mock_unauthorized_exception() + ) + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress(DOMAIN) + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + +@pytest.mark.parametrize( + "side_effect", + [OSError, mock_not_found_exception()], +) +async def test_http_info_request_fails( + doorbird_mocker: DoorbirdMockerType, side_effect: Exception +) -> None: + """Test basic setup with an http failure.""" + doorbird_entry = await doorbird_mocker(info_side_effect=side_effect) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_http_favorites_request_fails( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test basic setup with an http failure.""" + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_http_schedule_api_missing( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test missing the schedule API is non-fatal as not all models support it.""" + doorbird_entry = await doorbird_mocker( + schedule_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.LOADED + + +async def test_events_changed( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, +) -> 
None: + """Test basic setup.""" + doorbird_entry = await doorbird_mocker() + entry = doorbird_entry.entry + assert entry.state is ConfigEntryState.LOADED + api = doorbird_entry.api + api.favorites.reset_mock() + api.change_favorite.reset_mock() + api.schedule.reset_mock() + + hass.config_entries.async_update_entry(entry, options={"events": ["xyz"]}) + await hass.async_block_till_done() + assert len(api.favorites.mock_calls) == 2 + assert len(api.schedule.mock_calls) == 1 + + assert len(api.change_favorite.mock_calls) == 1 + favorite_type, title, url = api.change_favorite.mock_calls[0][1] + assert favorite_type == "http" + assert title == "Home Assistant (mydoorbird_xyz)" + assert url == ( + f"http://10.10.10.10:8123/api/doorbird/mydoorbird_xyz?token={entry.entry_id}" + ) diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py new file mode 100644 index 00000000000..7449250b718 --- /dev/null +++ b/tests/components/doorbird/test_repairs.py @@ -0,0 +1,61 @@ +"""Test repairs for doorbird.""" + +from __future__ import annotations + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import mock_not_found_exception +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_change_schedule_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + hass_client: ClientSessionGenerator, +) -> None: + """Test a doorbird when change_schedule fails.""" + assert await async_setup_component(hass, "repairs", {}) + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + issue_reg = ir.async_get(hass) + assert len(issue_reg.issues) == 1 + issue = list(issue_reg.issues.values())[0] + issue_id = issue.issue_id + assert issue.domain == DOMAIN + + await async_process_repairs_platforms(hass) + client = await hass_client() + + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + placeholders = data["description_placeholders"] + assert "404" in placeholders["error"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" diff --git a/tests/components/doorbird/test_view.py b/tests/components/doorbird/test_view.py new file mode 100644 index 00000000000..9d2b53714b6 --- /dev/null +++ b/tests/components/doorbird/test_view.py @@ -0,0 +1,21 @@ +"""Test DoorBird view.""" + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import API_URL + +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_non_webhook_with_wrong_token( + hass_client: ClientSessionGenerator, + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test 
calling the webhook with the wrong token.""" + await doorbird_mocker() + client = await hass_client() + + response = await client.get(f"{API_URL}/doorbell?token=wrong") + assert response.status == HTTPStatus.UNAUTHORIZED diff --git a/tests/components/drop_connect/test_sensor.py b/tests/components/drop_connect/test_sensor.py index 4873d1edbd1..cb56522a09d 100644 --- a/tests/components/drop_connect/test_sensor.py +++ b/tests/components/drop_connect/test_sensor.py @@ -47,7 +47,7 @@ from tests.typing import MqttMockHAClient @pytest.fixture(autouse=True) -def only_sensor_platform() -> Generator[[], None]: +def only_sensor_platform() -> Generator[None]: """Only setup the DROP sensor platform.""" with patch("homeassistant.components.drop_connect.PLATFORMS", [Platform.SENSOR]): yield diff --git a/tests/components/dsmr/conftest.py b/tests/components/dsmr/conftest.py index 2257b8414a6..2301b9dfc80 100644 --- a/tests/components/dsmr/conftest.py +++ b/tests/components/dsmr/conftest.py @@ -1,6 +1,7 @@ """Common test tools.""" import asyncio +from collections.abc import Generator from unittest.mock import MagicMock, patch from dsmr_parser.clients.protocol import DSMRProtocol @@ -15,7 +16,6 @@ from dsmr_parser.obis_references import ( ) from dsmr_parser.objects import CosemObject import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/dsmr/snapshots/test_diagnostics.ambr b/tests/components/dsmr/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..ec2dc274efa --- /dev/null +++ b/tests/components/dsmr/snapshots/test_diagnostics.ambr @@ -0,0 +1,29 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'data': dict({ + 'CURRENT_ELECTRICITY_USAGE': dict({ + 'unit': 'W', + 'value': 0.0, + }), + 'ELECTRICITY_ACTIVE_TARIFF': dict({ + 'unit': '', + 'value': '0001', + }), + 'GAS_METER_READING': dict({ + 'datetime': '2019-03-03T19:43:33+00:00', + 'unit': 'm³', + 'value': 745.695, + }), + }), + 'entry': dict({ + 'data': dict({ + 'dsmr_version': '2.2', + 'port': '/dev/ttyUSB0', + 'serial_id': '1234', + 'serial_id_gas': '5678', + }), + 'unique_id': '/dev/ttyUSB0', + }), + }) +# --- diff --git a/tests/components/dsmr/test_config_flow.py b/tests/components/dsmr/test_config_flow.py index 3b4dc533993..91adf38eacf 100644 --- a/tests/components/dsmr/test_config_flow.py +++ b/tests/components/dsmr/test_config_flow.py @@ -10,7 +10,8 @@ import serial import serial.tools.list_ports from homeassistant import config_entries -from homeassistant.components.dsmr import DOMAIN, config_flow +from homeassistant.components.dsmr import config_flow +from homeassistant.components.dsmr.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType diff --git a/tests/components/dsmr/test_diagnostics.py b/tests/components/dsmr/test_diagnostics.py new file mode 100644 index 00000000000..8fc996f6e34 --- /dev/null +++ b/tests/components/dsmr/test_diagnostics.py @@ -0,0 +1,82 @@ +"""Test DSMR diagnostics.""" + +import datetime +from decimal import Decimal +from unittest.mock import MagicMock + +from dsmr_parser.obis_references import ( + CURRENT_ELECTRICITY_USAGE, + ELECTRICITY_ACTIVE_TARIFF, + GAS_METER_READING, +) +from dsmr_parser.objects import CosemObject, MBusObject, Telegram +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing 
import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + entry_data = { + "port": "/dev/ttyUSB0", + "dsmr_version": "2.2", + "serial_id": "1234", + "serial_id_gas": "5678", + } + entry_options = { + "time_between_update": 0, + } + + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), + [{"value": Decimal("0.0"), "unit": "W"}], + ), + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m³"}, + ], + ), + "GAS_METER_READING", + ) + + mock_entry = MockConfigEntry( + domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options + ) + + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) + assert result == snapshot diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index 18f5e850ecd..20b3d253f39 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -5,11 +5,11 @@ from decimal import Decimal from unittest.mock import MagicMock from dsmr_parser.obis_references import ( - BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING2, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram from homeassistant.components.dsmr.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN @@ -65,22 +65,31 @@ async def test_migrate_gas_to_mbus( assert entity.unique_id == old_unique_id await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await hass.async_block_till_done() @@ -110,6 +119,106 @@ async def test_migrate_gas_to_mbus( ) +async def test_migrate_hourly_gas_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + 
device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": "4730303738353635363037343639323231", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "4730303738353635363037343639323231_hourly_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "4730303738353635363037343639323231", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1722749707)}, + {"value": Decimal(778.963), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + dev_entities = er.async_entries_for_device( + entity_registry, device.id, include_disabled_entities=True + ) + assert not dev_entities + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "4730303738353635363037343639323231" + ) + == "sensor.gas_meter_reading" + ) + + async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -173,22 +282,31 @@ async def test_migrate_gas_to_mbus_exists( ) await hass.async_block_till_done() - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 0), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) assert await hass.config_entries.async_setup(mock_entry.entry_id) await 
hass.async_block_till_done() diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index 435594d4eef..b93dd8d18d2 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -11,35 +11,24 @@ from decimal import Decimal from itertools import chain, repeat from unittest.mock import DEFAULT, MagicMock +from dsmr_parser import obis_references from dsmr_parser.obis_references import ( BELGIUM_CURRENT_AVERAGE_DEMAND, BELGIUM_MAXIMUM_DEMAND_MONTH, - BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING1, - BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_DEVICE_TYPE, - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS2_METER_READING2, - BELGIUM_MBUS3_DEVICE_TYPE, - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS3_METER_READING1, - BELGIUM_MBUS3_METER_READING2, - BELGIUM_MBUS4_DEVICE_TYPE, - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS4_METER_READING1, - BELGIUM_MBUS4_METER_READING2, CURRENT_ELECTRICITY_USAGE, ELECTRICITY_ACTIVE_TARIFF, ELECTRICITY_EXPORTED_TOTAL, ELECTRICITY_IMPORTED_TOTAL, GAS_METER_READING, HOURLY_GAS_METER_READING, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) -from dsmr_parser.objects import CosemObject, MBusObject +from dsmr_parser.objects import CosemObject, MBusObject, Telegram import pytest +from homeassistant.components.dsmr.sensor import SENSORS, SENSORS_MBUS_DEVICE_TYPE from homeassistant.components.sensor import ( ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -80,22 +69,31 @@ async def test_default_setup( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("0.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -134,22 +132,31 @@ async def test_default_setup( ) assert power_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "W" - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - GAS_METER_READING: MBusObject( - GAS_METER_READING, + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(745.701), "unit": UnitOfVolume.CUBIC_METERS}, ], ), - } + "GAS_METER_READING", + ) # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser 
telegram_callback(telegram) @@ -209,15 +216,20 @@ async def test_setup_only_energy( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -260,18 +272,23 @@ async def test_v4_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -343,18 +360,23 @@ async def test_v5_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": value, "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -411,23 +433,34 @@ async def test_luxembourg_meter( "time_between_update": 0, } - telegram = { - HOURLY_GAS_METER_READING: MBusObject( - HOURLY_GAS_METER_READING, + telegram = Telegram() + telegram.add( + HOURLY_GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + "HOURLY_GAS_METER_READING", + ) + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -495,78 +528,127 @@ async def test_belgian_meter( "time_between_update": 0, } - telegram = { - BELGIUM_CURRENT_AVERAGE_DEMAND: CosemObject( - BELGIUM_CURRENT_AVERAGE_DEMAND, + telegram = Telegram() + telegram.add( + BELGIUM_CURRENT_AVERAGE_DEMAND, + CosemObject( + (0, 0), [{"value": Decimal(1.75), 
"unit": "kW"}], ), - BELGIUM_MAXIMUM_DEMAND_MONTH: MBusObject( - BELGIUM_MAXIMUM_DEMAND_MONTH, + "BELGIUM_CURRENT_AVERAGE_DEMAND", + ) + telegram.add( + BELGIUM_MAXIMUM_DEMAND_MONTH, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(4.11), "unit": "kW"}, ], ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + "BELGIUM_MAXIMUM_DEMAND_MONTH", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING2: MBusObject( - BELGIUM_MBUS1_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(745.695), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING1: MBusObject( - BELGIUM_MBUS2_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642214)}, {"value": Decimal(678.695), "unit": "m3"}, ], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "MBUS_METER_READING", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": 
""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -680,64 +762,103 @@ async def test_belgian_meter_alt( "time_between_update": 0, } - telegram = { - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS1_METER_READING1: MBusObject( - BELGIUM_MBUS1_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), [ {"value": datetime.datetime.fromtimestamp(1551642215)}, {"value": Decimal(123.456), "unit": "m3"}, ], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS2_METER_READING2: MBusObject( - BELGIUM_MBUS2_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 2), [ {"value": datetime.datetime.fromtimestamp(1551642216)}, {"value": Decimal(678.901), "unit": "m3"}, ], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING1: MBusObject( - BELGIUM_MBUS3_METER_READING1, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - BELGIUM_MBUS4_METER_READING2: MBusObject( - BELGIUM_MBUS4_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -829,49 +950,78 @@ async def test_belgian_meter_mbus( "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - 
ELECTRICITY_ACTIVE_TARIFF, [{"value": "0003", "unit": ""}] - ), - BELGIUM_MBUS1_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS1_DEVICE_TYPE, [{"value": "006", "unit": ""}] - ), - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0003", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "006", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - BELGIUM_MBUS2_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS2_DEVICE_TYPE, [{"value": "003", "unit": ""}] - ), - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 2), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - BELGIUM_MBUS3_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS3_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER: CosemObject( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 3), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - BELGIUM_MBUS3_METER_READING2: MBusObject( - BELGIUM_MBUS3_METER_READING2, + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 3), [ {"value": datetime.datetime.fromtimestamp(1551642217)}, {"value": Decimal(12.12), "unit": "m3"}, ], ), - BELGIUM_MBUS4_DEVICE_TYPE: CosemObject( - BELGIUM_MBUS4_DEVICE_TYPE, [{"value": "007", "unit": ""}] - ), - BELGIUM_MBUS4_METER_READING1: MBusObject( - BELGIUM_MBUS4_METER_READING1, + "MBUS_METER_READING", + ) + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 4), [{"value": "007", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 4), [ {"value": datetime.datetime.fromtimestamp(1551642218)}, {"value": Decimal(13.13), "unit": "m3"}, ], ), - } + "MBUS_METER_READING", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -894,20 +1044,32 @@ async def test_belgian_meter_mbus( active_tariff = hass.states.get("sensor.electricity_meter_active_tariff") assert active_tariff.state == "unknown" - # check if gas consumption mbus2 is parsed correctly + # check if gas consumption mbus1 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") assert gas_consumption is None - # check if water usage mbus3 is parsed correctly - water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") - assert water_consumption is None - - # check if gas consumption mbus4 is parsed correctly + # check if gas consumption mbus2 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") assert gas_consumption is None - # check if gas consumption mbus4 is parsed correctly + # check if water usage mbus3 is parsed correctly water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption + assert water_consumption.state == "12.12" 
+ assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if water usage mbus4 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") assert water_consumption.state == "13.13" assert ( water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER @@ -938,11 +1100,12 @@ async def test_belgian_meter_low( "time_between_update": 0, } - telegram = { - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0002", "unit": ""}] - ) - } + telegram = Telegram() + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0002", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -986,16 +1149,23 @@ async def test_swedish_meter( "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(123.456), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(654.321), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1054,16 +1224,23 @@ async def test_easymeter( "time_between_update": 0, } - telegram = { - ELECTRICITY_IMPORTED_TOTAL: CosemObject( - ELECTRICITY_IMPORTED_TOTAL, + telegram = Telegram() + telegram.add( + ELECTRICITY_IMPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(54184.6316), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - ELECTRICITY_EXPORTED_TOTAL: CosemObject( - ELECTRICITY_EXPORTED_TOTAL, + "ELECTRICITY_IMPORTED_TOTAL", + ) + telegram.add( + ELECTRICITY_EXPORTED_TOTAL, + CosemObject( + (0, 0), [{"value": Decimal(19981.1069), "unit": UnitOfEnergy.KILO_WATT_HOUR}], ), - } + "ELECTRICITY_EXPORTED_TOTAL", + ) mock_entry = MockConfigEntry( domain="dsmr", @@ -1222,15 +1399,20 @@ async def test_reconnect( "time_between_update": 0, } - telegram = { - CURRENT_ELECTRICITY_USAGE: CosemObject( - CURRENT_ELECTRICITY_USAGE, + telegram = Telegram() + telegram.add( + CURRENT_ELECTRICITY_USAGE, + CosemObject( + (0, 0), [{"value": Decimal("35.0"), "unit": UnitOfPower.WATT}], ), - ELECTRICITY_ACTIVE_TARIFF: CosemObject( - ELECTRICITY_ACTIVE_TARIFF, [{"value": "0001", "unit": ""}] - ), - } + "CURRENT_ELECTRICITY_USAGE", + ) + telegram.add( + ELECTRICITY_ACTIVE_TARIFF, + CosemObject((0, 0), [{"value": "0001", "unit": ""}]), + "ELECTRICITY_ACTIVE_TARIFF", + ) # mock waiting coroutine while connection lasts closed = asyncio.Event() @@ -1300,15 +1482,18 @@ async def test_gas_meter_providing_energy_reading( "time_between_update": 0, } - telegram = { - GAS_METER_READING: MBusObject( - GAS_METER_READING, + telegram = Telegram() + telegram.add( + GAS_METER_READING, + MBusObject( + (0, 0), [ {"value": datetime.datetime.fromtimestamp(1551642213)}, {"value": Decimal(123.456), "unit": UnitOfEnergy.GIGA_JOULE}, ], ), - } + "GAS_METER_READING", + )
mock_entry = MockConfigEntry( domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options @@ -1334,3 +1519,13 @@ async def test_gas_meter_providing_energy_reading( gas_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.GIGA_JOULE ) + + +def test_all_obis_references_exists(): + """Verify that all attributes exist by name in database.""" + for sensor in SENSORS: + assert hasattr(obis_references, sensor.obis_reference) + + for sensors in SENSORS_MBUS_DEVICE_TYPE.values(): + for sensor in sensors: + assert hasattr(obis_references, sensor.obis_reference) diff --git a/tests/components/dsmr_reader/test_diagnostics.py b/tests/components/dsmr_reader/test_diagnostics.py index 553efd0b38b..793fe1362b0 100644 --- a/tests/components/dsmr_reader/test_diagnostics.py +++ b/tests/components/dsmr_reader/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.dsmr_reader.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,4 +37,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot + assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/duotecno/conftest.py b/tests/components/duotecno/conftest.py index 1b6ba8f65e5..1bdd26bab9c 100644 --- a/tests/components/duotecno/conftest.py +++ b/tests/components/duotecno/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the duotecno tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/dwd_weather_warnings/conftest.py b/tests/components/dwd_weather_warnings/conftest.py index 40c8bf3cfa0..50c0fe51024 100644 --- a/tests/components/dwd_weather_warnings/conftest.py +++ b/tests/components/dwd_weather_warnings/conftest.py @@ -1,9 +1,9 @@ """Configuration for Deutscher Wetterdienst (DWD) Weather Warnings tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.dwd_weather_warnings.const import ( ADVANCE_WARNING_SENSOR, diff --git a/tests/components/easyenergy/conftest.py b/tests/components/easyenergy/conftest.py index 96d356b8906..ffe0e36f3d2 100644 --- a/tests/components/easyenergy/conftest.py +++ b/tests/components/easyenergy/conftest.py @@ -1,11 +1,11 @@ """Fixtures for easyEnergy integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from easyenergy import Electricity, Gas import pytest -from typing_extensions import Generator from homeassistant.components.easyenergy.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/ecobee/conftest.py b/tests/components/ecobee/conftest.py index d9583e15986..01f249bea15 100644 --- a/tests/components/ecobee/conftest.py +++ b/tests/components/ecobee/conftest.py @@ -1,10 +1,10 @@ """Fixtures for tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from requests_mock import Mocker -from typing_extensions import Generator from homeassistant.components.ecobee import ECOBEE_API_KEY, ECOBEE_REFRESH_TOKEN diff --git a/tests/components/ecobee/test_climate.py 
b/tests/components/ecobee/test_climate.py index ae53132fe46..1c9dcec0ad2 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -195,7 +195,7 @@ async def test_hvac_mode(ecobee_fixture, thermostat) -> None: async def test_hvac_modes(thermostat) -> None: """Test operation list property.""" - assert ["heat_cool", "heat", "cool", "off"] == thermostat.hvac_modes + assert thermostat.hvac_modes == ["heat_cool", "heat", "cool", "off"] async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: @@ -208,51 +208,51 @@ async def test_hvac_mode2(ecobee_fixture, thermostat) -> None: async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: """Test device state attributes property.""" ecobee_fixture["equipmentStatus"] = "heatPump2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "heatPump2", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "auxHeat2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "auxHeat2", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "compCool1" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "compCool1", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "", - } == thermostat.extra_state_attributes + } ecobee_fixture["equipmentStatus"] = "Unknown" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate1", "fan_min_on_time": 10, "equipment_running": "Unknown", - } == thermostat.extra_state_attributes + } ecobee_fixture["program"]["currentClimateRef"] = "c2" - assert { + assert thermostat.extra_state_attributes == { "fan": "off", "climate_mode": "Climate2", "fan_min_on_time": 10, "equipment_running": "Unknown", - } == thermostat.extra_state_attributes + } async def test_is_aux_heat_on(hass: HomeAssistant) -> None: diff --git a/tests/components/ecoforest/conftest.py b/tests/components/ecoforest/conftest.py index 3eb13e58aee..85bfff08bdf 100644 --- a/tests/components/ecoforest/conftest.py +++ b/tests/components/ecoforest/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Ecoforest tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from pyecoforest.models.device import Alarm, Device, OperationMode, State import pytest -from typing_extensions import Generator from homeassistant.components.ecoforest import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/ecovacs/conftest.py b/tests/components/ecovacs/conftest.py index 8d0033a6bc9..22039d6c0bc 100644 --- a/tests/components/ecovacs/conftest.py +++ b/tests/components/ecovacs/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for the Ecovacs tests.""" +from collections.abc import AsyncGenerator, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -9,7 +10,7 @@ from deebot_client.device import Device from deebot_client.exceptions import ApiError from deebot_client.models import Credentials import pytest -from typing_extensions import AsyncGenerator, Generator +from sucks 
import EventEmitter from homeassistant.components.ecovacs import PLATFORMS from homeassistant.components.ecovacs.const import DOMAIN @@ -117,6 +118,27 @@ def mock_mqtt_client(mock_authenticator: Mock) -> Generator[Mock]: yield client +@pytest.fixture +def mock_vacbot(device_fixture: str) -> Generator[Mock]: + """Mock the legacy VacBot.""" + with patch( + "homeassistant.components.ecovacs.controller.VacBot", + autospec=True, + ) as mock: + vacbot = mock.return_value + vacbot.vacuum = load_json_object_fixture( + f"devices/{device_fixture}/device.json", DOMAIN + ) + vacbot.statusEvents = EventEmitter() + vacbot.batteryEvents = EventEmitter() + vacbot.lifespanEvents = EventEmitter() + vacbot.errorEvents = EventEmitter() + vacbot.battery_status = None + vacbot.fan_speed = None + vacbot.components = {} + yield vacbot + + @pytest.fixture def mock_device_execute() -> Generator[AsyncMock]: """Mock the device execute function.""" @@ -154,7 +176,7 @@ async def init_integration( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) yield mock_config_entry diff --git a/tests/components/ecovacs/fixtures/devices/123/device.json b/tests/components/ecovacs/fixtures/devices/123/device.json new file mode 100644 index 00000000000..07bdf01b156 --- /dev/null +++ b/tests/components/ecovacs/fixtures/devices/123/device.json @@ -0,0 +1,23 @@ +{ + "did": "E1234567890000000003", + "name": "E1234567890000000003", + "class": "123", + "resource": "atom", + "company": "eco-legacy", + "deviceName": "DEEBOT Slim2 Series", + "icon": "https://portal-ww.ecouser.net/api/pim/file/get/5d2c150dba13eb00013feaae", + "ota": false, + "UILogicId": "ECO_INTL_123", + "materialNo": "110-1639-0102", + "pid": "5cae9b201285190001685977", + "product_category": "DEEBOT", + "model": "Slim2", + "updateInfo": { + "needUpdate": false, + "changeLog": "" + }, + "nick": null, + "homeSort": 9999, + "status": 2, + "otaUpgrade": {} +} diff --git a/tests/components/ecovacs/snapshots/test_button.ambr b/tests/components/ecovacs/snapshots/test_button.ambr index d250a60a35f..efae8896962 100644 --- a/tests/components/ecovacs/snapshots/test_button.ambr +++ b/tests/components/ecovacs/snapshots/test_button.ambr @@ -229,7 +229,7 @@ 'state': '2024-01-01T00:00:00+00:00', }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:entity-registry] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -241,7 +241,7 @@ 'disabled_by': None, 'domain': 'button', 'entity_category': , - 'entity_id': 'button.ozmo_950_reset_side_brushes_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brush_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -253,7 +253,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Reset side brushes lifespan', + 'original_name': 'Reset side brush lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -262,13 +262,13 @@ 'unit_of_measurement': None, }) # --- -# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brushes_lifespan:state] +# name: test_buttons[yna5x1][button.ozmo_950_reset_side_brush_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Reset side brushes lifespan', + 'friendly_name': 'Ozmo 950 Reset side brush lifespan', }), 'context': , - 'entity_id': 
'button.ozmo_950_reset_side_brushes_lifespan', + 'entity_id': 'button.ozmo_950_reset_side_brush_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/snapshots/test_init.ambr b/tests/components/ecovacs/snapshots/test_init.ambr index 3ce872e7898..9113445cc31 100644 --- a/tests/components/ecovacs/snapshots/test_init.ambr +++ b/tests/components/ecovacs/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Ecovacs', 'model': 'DEEBOT OZMO 950 Series', + 'model_id': 'yna5xi', 'name': 'Ozmo 950', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/ecovacs/snapshots/test_number.ambr b/tests/components/ecovacs/snapshots/test_number.ambr index da8406491b4..c80132784e1 100644 --- a/tests/components/ecovacs/snapshots/test_number.ambr +++ b/tests/components/ecovacs/snapshots/test_number.ambr @@ -1,4 +1,115 @@ # serializer version: 1 +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_cut_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cut direction', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cut_direction', + 'unique_id': '8516fbb1-17f1-4194-0000000_cut_direction', + 'unit_of_measurement': '°', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Cut direction', + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'number.goat_g1_cut_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '8516fbb1-17f1-4194-0000000_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Volume', + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.goat_g1_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- # name: test_number_entities[yna5x1][number.ozmo_950_volume:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git 
a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index e2cee3d410f..659edfde2cf 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,4 +1,152 @@ # serializer version: 1 +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filter lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_filter', + 'unique_id': 'E1234567890000000003_lifespan_filter', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Filter lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Main brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_main_brush', + 'unique_id': 'E1234567890000000003_lifespan_main_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Main brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Side brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'lifespan_side_brush', + 'unique_id': 'E1234567890000000003_lifespan_side_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Side brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_legacy_sensors[123][states] + list([ + 'sensor.e1234567890000000003_main_brush_lifespan', + 'sensor.e1234567890000000003_side_brush_lifespan', + 'sensor.e1234567890000000003_filter_lifespan', + ]) +# --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -910,7 +1058,7 @@ 'state': '80', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:entity-registry] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -922,7 +1070,7 @@ 'disabled_by': None, 'domain': 'sensor', 'entity_category': , - 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', 'has_entity_name': True, 'hidden_by': None, 'icon': None, @@ -934,7 +1082,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Side brushes lifespan', + 'original_name': 'Side brush lifespan', 'platform': 'ecovacs', 'previous_unique_id': None, 'supported_features': 0, @@ -943,14 +1091,14 @@ 'unit_of_measurement': '%', }) # --- -# name: test_sensors[yna5x1][sensor.ozmo_950_side_brushes_lifespan:state] +# name: test_sensors[yna5x1][sensor.ozmo_950_side_brush_lifespan:state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'friendly_name': 'Ozmo 950 Side brushes lifespan', + 'friendly_name': 'Ozmo 950 Side brush lifespan', 'unit_of_measurement': '%', }), 'context': , - 'entity_id': 'sensor.ozmo_950_side_brushes_lifespan', + 'entity_id': 'sensor.ozmo_950_side_brush_lifespan', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/ecovacs/test_button.py b/tests/components/ecovacs/test_button.py index 08d53f3e93d..4b3068f6cda 100644 --- a/tests/components/ecovacs/test_button.py +++ b/tests/components/ecovacs/test_button.py @@ -42,7 +42,7 @@ def platforms() -> Platform | list[Platform]: ResetLifeSpan(LifeSpan.FILTER), ), ( - "button.ozmo_950_reset_side_brushes_lifespan", + "button.ozmo_950_reset_side_brush_lifespan", ResetLifeSpan(LifeSpan.SIDE_BRUSH), ), ], @@ -107,7 +107,7 @@ async def test_buttons( [ "button.ozmo_950_reset_main_brush_lifespan", "button.ozmo_950_reset_filter_lifespan", - "button.ozmo_950_reset_side_brushes_lifespan", + "button.ozmo_950_reset_side_brush_lifespan", ], ), ( diff --git a/tests/components/ecovacs/test_diagnostics.py b/tests/components/ecovacs/test_diagnostics.py index b025db43cc0..6e4dcd5f677 100644 --- a/tests/components/ecovacs/test_diagnostics.py +++ b/tests/components/ecovacs/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, init_integration - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index 27d00a2d023..ac4d5661a83 100644 
--- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -129,12 +129,15 @@ async def test_devices_in_dr( assert device_entry == snapshot(name=device.device_info["did"]) -@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration") +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" +) @pytest.mark.parametrize( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 24), + ("5xu9h3", 25), + ("123", 1), ], ) async def test_all_entities_loaded( diff --git a/tests/components/ecovacs/test_number.py b/tests/components/ecovacs/test_number.py index d444d6510a8..a735863d40a 100644 --- a/tests/components/ecovacs/test_number.py +++ b/tests/components/ecovacs/test_number.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from deebot_client.command import Command -from deebot_client.commands.json import SetVolume -from deebot_client.events import Event, VolumeEvent +from deebot_client.commands.json import SetCutDirection, SetVolume +from deebot_client.events import CutDirectionEvent, Event, VolumeEvent import pytest from syrupy import SnapshotAssertion @@ -53,8 +53,23 @@ class NumberTestCase: ), ], ), + ( + "5xu9h3", + [ + NumberTestCase( + "number.goat_g1_volume", VolumeEvent(3, 11), "3", 7, SetVolume(7) + ), + NumberTestCase( + "number.goat_g1_cut_direction", + CutDirectionEvent(45), + "45", + 97, + SetCutDirection(97), + ), + ], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_number_entities( hass: HomeAssistant, @@ -107,8 +122,12 @@ async def test_number_entities( "yna5x1", ["number.ozmo_950_volume"], ), + ( + "5xu9h3", + ["number.goat_g1_cut_direction", "number.goat_g1_volume"], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_disabled_by_default_number_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] @@ -125,6 +144,7 @@ async def test_disabled_by_default_number_entities( @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize(("device_fixture"), ["yna5x1"]) async def test_volume_maximum( hass: HomeAssistant, controller: EcovacsController, diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 005d10bffbd..53c57999776 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -1,5 +1,7 @@ """Tests for Ecovacs sensors.""" +from unittest.mock import Mock + from deebot_client.event_bus import EventBus from deebot_client.events import ( BatteryEvent, @@ -64,7 +66,7 @@ async def notify_events(hass: HomeAssistant, event_bus: EventBus): "sensor.ozmo_950_wi_fi_ssid", "sensor.ozmo_950_main_brush_lifespan", "sensor.ozmo_950_filter_lifespan", - "sensor.ozmo_950_side_brushes_lifespan", + "sensor.ozmo_950_side_brush_lifespan", "sensor.ozmo_950_error", ], ), @@ -152,3 +154,34 @@ async def test_disabled_by_default_sensors( ), f"Entity registry entry for {entity_id} is missing" assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" +) +@pytest.mark.parametrize(("device_fixture"), ["123"]) +async def test_legacy_sensors( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_vacbot: Mock, +) -> None: + """Test that sensor entity snapshots match.""" + mock_vacbot.components = 
{"main_brush": 0.8, "side_brush": 0.6, "filter": 0.4} + mock_vacbot.lifespanEvents.notify("dummy_data") + await hass.async_block_till_done(wait_background_tasks=True) + + states = hass.states.async_entity_ids() + assert snapshot(name="states") == states + + for entity_id in hass.states.async_entity_ids(): + assert (state := hass.states.get(entity_id)), f"State of {entity_id} is missing" + assert snapshot(name=f"{entity_id}:state") == state + + assert (entity_entry := entity_registry.async_get(state.entity_id)) + assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry + + assert entity_entry.device_id + assert (device_entry := device_registry.async_get(entity_entry.device_id)) + assert device_entry.identifiers == {(DOMAIN, "E1234567890000000003")} diff --git a/tests/components/ecovacs/test_services.py b/tests/components/ecovacs/test_services.py index 973c63782ec..6fd10cde6d9 100644 --- a/tests/components/ecovacs/test_services.py +++ b/tests/components/ecovacs/test_services.py @@ -16,9 +16,7 @@ pytestmark = [pytest.mark.usefixtures("init_integration")] @pytest.fixture -def mock_device_execute_response( - data: dict[str, Any], -) -> Generator[dict[str, Any], None, None]: +def mock_device_execute_response(data: dict[str, Any]) -> Generator[dict[str, Any]]: """Mock the device execute function response.""" response = { @@ -73,7 +71,7 @@ def mock_device_execute_response( ) async def test_get_positions_service( hass: HomeAssistant, - mock_device_execute_response: dict[str], + mock_device_execute_response: dict[str, Any], entity_id: str, ) -> None: """Test that get_positions service response snapshots match.""" diff --git a/tests/components/edl21/conftest.py b/tests/components/edl21/conftest.py index b6af4ea9cef..1b14e3366d8 100644 --- a/tests/components/edl21/conftest.py +++ b/tests/components/edl21/conftest.py @@ -1,9 +1,9 @@ """Define test fixtures for EDL21.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/electric_kiwi/conftest.py b/tests/components/electric_kiwi/conftest.py index c9f9c7e04f0..010efcb7b5f 100644 --- a/tests/components/electric_kiwi/conftest.py +++ b/tests/components/electric_kiwi/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from time import time from unittest.mock import AsyncMock, patch from electrickiwi_api.model import AccountBalance, Hop, HopIntervals import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/elevenlabs/__init__.py b/tests/components/elevenlabs/__init__.py new file mode 100644 index 00000000000..261286f04f7 --- /dev/null +++ b/tests/components/elevenlabs/__init__.py @@ -0,0 +1 @@ +"""Tests for the ElevenLabs integration.""" diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py new file mode 100644 index 00000000000..13eb022243f --- /dev/null +++ b/tests/components/elevenlabs/conftest.py @@ -0,0 +1,65 @@ +"""Common fixtures for the ElevenLabs text-to-speech tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE +from 
homeassistant.const import CONF_API_KEY + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.elevenlabs.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_async_client() -> Generator[AsyncMock, None, None]: + """Override async ElevenLabs client.""" + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + ) as mock_async_client: + yield mock_async_client + + +@pytest.fixture +def mock_async_client_fail() -> Generator[AsyncMock, None, None]: + """Override async ElevenLabs client.""" + with patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + return_value=AsyncMock(), + ) as mock_async_client: + mock_async_client.side_effect = ApiError + yield mock_async_client + + +@pytest.fixture +def mock_entry() -> MockConfigEntry: + """Mock a config entry.""" + entry = MockConfigEntry( + domain="elevenlabs", + data={ + CONF_API_KEY: "api_key", + }, + options={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + entry.models = { + "model1": "model1", + } + + entry.voices = {"voice1": "voice1"} + return entry diff --git a/tests/components/elevenlabs/const.py b/tests/components/elevenlabs/const.py new file mode 100644 index 00000000000..e16e1fd1334 --- /dev/null +++ b/tests/components/elevenlabs/const.py @@ -0,0 +1,52 @@ +"""Constants for the Testing of the ElevenLabs text-to-speech integration.""" + +from elevenlabs.types import LanguageResponse, Model, Voice + +from homeassistant.components.elevenlabs.const import DEFAULT_MODEL + +MOCK_VOICES = [ + Voice( + voice_id="voice1", + name="Voice 1", + ), + Voice( + voice_id="voice2", + name="Voice 2", + ), +] + +MOCK_MODELS = [ + Model( + model_id="model1", + name="Model 1", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id="model2", + name="Model 2", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id=DEFAULT_MODEL, + name=DEFAULT_MODEL, + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), +] diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py new file mode 100644 index 00000000000..853c49d48ff --- /dev/null +++ b/tests/components/elevenlabs/test_config_flow.py @@ -0,0 +1,94 @@ +"""Test the ElevenLabs text-to-speech config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.elevenlabs.const import ( + CONF_MODEL, + CONF_VOICE, + DEFAULT_MODEL, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from 
homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_step( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, +) -> None: + """Test user step create entry result.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "ElevenLabs" + assert result["data"] == { + "api_key": "api_key", + } + assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() + + +async def test_invalid_api_key( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock +) -> None: + """Test user step with invalid api key.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] + + mock_setup_entry.assert_not_called() + + +async def test_options_flow_init( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, + mock_entry: MockConfigEntry, +) -> None: + """Test options flow init.""" + mock_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_entry.options == {CONF_MODEL: "model1", CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py new file mode 100644 index 00000000000..7fa289f24ed --- /dev/null +++ b/tests/components/elevenlabs/test_tts.py @@ -0,0 +1,270 @@ +"""Tests for the ElevenLabs TTS entity.""" + +from __future__ import annotations + +from http import HTTPStatus +from typing import Any +from unittest.mock import AsyncMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components import tts +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE, DOMAIN +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) +from homeassistant.config import async_process_ha_core_config +from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY +from homeassistant.core import HomeAssistant, ServiceCall + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry, async_mock_service +from tests.components.tts.common import retrieve_media +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) 
+def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock): + """Mock writing tags.""" + + +@pytest.fixture(autouse=True) +def mock_tts_cache_dir_autouse(mock_tts_cache_dir): + """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +@pytest.fixture +async def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Mock media player calls.""" + return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) + + +@pytest.fixture(autouse=True) +async def setup_internal_url(hass: HomeAssistant) -> None: + """Set up internal url.""" + await async_process_ha_core_config( + hass, {"internal_url": "http://example.local:8123"} + ) + + +@pytest.fixture(name="setup") +async def setup_fixture( + hass: HomeAssistant, + config_data: dict[str, Any], + config_options: dict[str, Any], + request: pytest.FixtureRequest, + mock_async_client: AsyncMock, +) -> AsyncMock: + """Set up the test environment.""" + if request.param == "mock_config_entry_setup": + await mock_config_entry_setup(hass, config_data, config_options) + else: + raise RuntimeError("Invalid setup fixture") + + await hass.async_block_till_done() + return mock_async_client + + +@pytest.fixture(name="config_data") +def config_data_fixture() -> dict[str, Any]: + """Return config data.""" + return {} + + +@pytest.fixture(name="config_options") +def config_options_fixture() -> dict[str, Any]: + """Return config options.""" + return {} + + +async def mock_config_entry_setup( + hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any] +) -> None: + """Mock config entry setup.""" + default_config_data = { + CONF_API_KEY: "api_key", + } + default_config_options = { + CONF_VOICE: "voice1", + CONF_MODEL: "model1", + } + config_entry = MockConfigEntry( + domain=DOMAIN, + data=default_config_data | config_data, + options=default_config_options | config_options, + ) + config_entry.add_to_hass(hass) + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", return_value=client_mock + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + +@pytest.mark.parametrize( + "config_data", + [ + {}, + {tts.CONF_LANG: "de"}, + {tts.CONF_LANG: "en"}, + {tts.CONF_LANG: "ja"}, + {tts.CONF_LANG: "es"}, + ], +) +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test tts service.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice2", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", 
"tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "de", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "es", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_lang_config( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with other langcodes in the config.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_error( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with http response 400.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + tts_entity._client.generate.side_effect = ApiError + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.NOT_FOUND + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) diff --git a/tests/components/elgato/conftest.py b/tests/components/elgato/conftest.py index aaaed0dc8da..73b09421576 100644 --- a/tests/components/elgato/conftest.py +++ b/tests/components/elgato/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Elgato integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from elgato import BatteryInfo, ElgatoNoBatteryError, Info, Settings, State import pytest -from typing_extensions import Generator from homeassistant.components.elgato.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT diff --git a/tests/components/elgato/snapshots/test_button.ambr b/tests/components/elgato/snapshots/test_button.ambr index 77555c85a06..dcf9d1c87d0 100644 --- a/tests/components/elgato/snapshots/test_button.ambr +++ b/tests/components/elgato/snapshots/test_button.ambr @@ -72,6 +72,7 @@ }), 
'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -154,6 +155,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_light.ambr b/tests/components/elgato/snapshots/test_light.ambr index 8e2962fc698..c3ab076ded2 100644 --- a/tests/components/elgato/snapshots/test_light.ambr +++ b/tests/components/elgato/snapshots/test_light.ambr @@ -104,6 +104,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -220,6 +221,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -336,6 +338,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Light Strip', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_sensor.ambr b/tests/components/elgato/snapshots/test_sensor.ambr index c2bcde7a66b..be0ec0a56c5 100644 --- a/tests/components/elgato/snapshots/test_sensor.ambr +++ b/tests/components/elgato/snapshots/test_sensor.ambr @@ -79,6 +79,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -171,6 +172,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -263,6 +265,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -352,6 +355,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -444,6 +448,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elgato/snapshots/test_switch.ambr b/tests/components/elgato/snapshots/test_switch.ambr index 12857a71cb3..ba95160d28a 100644 --- a/tests/components/elgato/snapshots/test_switch.ambr +++ b/tests/components/elgato/snapshots/test_switch.ambr @@ -71,6 +71,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , @@ -152,6 +153,7 @@ }), 'manufacturer': 'Elgato', 'model': 'Elgato Key Light Mini', + 'model_id': None, 'name': 'Frenck', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/elmax/conftest.py b/tests/components/elmax/conftest.py index 552aa138f1b..f92fc2f1827 100644 --- a/tests/components/elmax/conftest.py +++ b/tests/components/elmax/conftest.py @@ -1,5 +1,6 @@ """Configuration for Elmax tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, patch @@ -12,7 +13,6 @@ from elmax_api.constants import ( from httpx import Response import pytest import respx -from typing_extensions import Generator from . 
import ( MOCK_DIRECT_HOST, diff --git a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr index f09ba6752c5..f175fc707bb 100644 --- a/tests/components/elmax/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/elmax/snapshots/test_alarm_control_panel.ambr @@ -46,7 +46,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_2-entry] @@ -96,7 +96,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- # name: test_alarm_control_panels[alarm_control_panel.direct_panel_https_1_1_1_1_443_api_v2_area_3-entry] @@ -146,6 +146,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disarmed', }) # --- diff --git a/tests/components/elmax/test_alarm_control_panel.py b/tests/components/elmax/test_alarm_control_panel.py index 6e4f09710fc..76dc8845662 100644 --- a/tests/components/elmax/test_alarm_control_panel.py +++ b/tests/components/elmax/test_alarm_control_panel.py @@ -1,9 +1,11 @@ """Tests for the Elmax alarm control panels.""" +from datetime import timedelta from unittest.mock import patch from syrupy import SnapshotAssertion +from homeassistant.components.elmax import POLLING_SECONDS from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -12,6 +14,8 @@ from . import init_integration from tests.common import snapshot_platform +WAIT = timedelta(seconds=POLLING_SECONDS) + async def test_alarm_control_panels( hass: HomeAssistant, diff --git a/tests/components/elvia/conftest.py b/tests/components/elvia/conftest.py index 0708e5c698a..13955db49d5 100644 --- a/tests/components/elvia/conftest.py +++ b/tests/components/elvia/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Elvia tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/emoncms/__init__.py b/tests/components/emoncms/__init__.py new file mode 100644 index 00000000000..ecf3c54e9ed --- /dev/null +++ b/tests/components/emoncms/__init__.py @@ -0,0 +1 @@ +"""Tests for the emoncms component.""" diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py new file mode 100644 index 00000000000..500fff228e9 --- /dev/null +++ b/tests/components/emoncms/conftest.py @@ -0,0 +1,47 @@ +"""Fixtures for emoncms integration tests.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest + +UNITS = ["kWh", "Wh", "W", "V", "A", "VA", "°C", "°F", "K", "Hz", "hPa", ""] + + +def get_feed( + number: int, unit: str = "W", value: int = 18.04, timestamp: int = 1665509570 +): + """Generate feed details.""" + return { + "id": str(number), + "userid": "1", + "name": f"parameter {number}", + "tag": "tag", + "size": "35809224", + "unit": unit, + "time": timestamp, + "value": value, + } + + +FEEDS = [get_feed(i + 1, unit=unit) for i, unit in enumerate(UNITS)] + + +EMONCMS_FAILURE = {"success": False, "message": "failure"} + + +@pytest.fixture +async def emoncms_client() -> AsyncGenerator[AsyncMock]: + """Mock pyemoncms success response.""" + with ( + patch( + "homeassistant.components.emoncms.sensor.EmoncmsClient", autospec=True + ) 
as mock_client, + patch( + "homeassistant.components.emoncms.coordinator.EmoncmsClient", + new=mock_client, + ), + ): + client = mock_client.return_value + client.async_request.return_value = {"success": True, "message": FEEDS} + yield client diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..62c85aaba01 --- /dev/null +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -0,0 +1,24 @@ +# serializer version: 1 +# name: test_coordinator_update[sensor.emoncms_parameter_1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'FeedId': '1', + 'FeedName': 'parameter 1', + 'LastUpdated': 1665509570, + 'LastUpdatedStr': '2022-10-11T10:32:50-07:00', + 'Size': '35809224', + 'Tag': 'tag', + 'UserId': '1', + 'device_class': 'temperature', + 'friendly_name': 'EmonCMS parameter 1', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.emoncms_parameter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18.04', + }) +# --- diff --git a/tests/components/emoncms/test_sensor.py b/tests/components/emoncms/test_sensor.py new file mode 100644 index 00000000000..a039239077e --- /dev/null +++ b/tests/components/emoncms/test_sensor.py @@ -0,0 +1,90 @@ +"""Test emoncms sensor.""" + +from typing import Any +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_PLATFORM, CONF_URL +from homeassistant.core import HomeAssistant +from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component + +from .conftest import EMONCMS_FAILURE, FEEDS, get_feed + +from tests.common import async_fire_time_changed + +YAML = { + CONF_PLATFORM: "emoncms", + CONF_API_KEY: "my_api_key", + CONF_ID: 1, + CONF_URL: "http://1.1.1.1", + CONF_ONLY_INCLUDE_FEEDID: [1, 2], + "scan_interval": 30, +} + + +@pytest.fixture +def emoncms_yaml_config() -> ConfigType: + """Mock emoncms configuration from yaml.""" + return {"sensor": YAML} + + +def get_entity_ids(feeds: list[dict[str, Any]]) -> list[str]: + """Get emoncms entity ids.""" + return [ + f"{SENSOR_DOMAIN}.{DOMAIN}_{feed["name"].replace(' ', '_')}" for feed in feeds + ] + + +def get_feeds(nbs: list[int]) -> list[dict[str, Any]]: + """Get feeds.""" + return [feed for feed in FEEDS if feed["id"] in str(nbs)] + + +async def test_coordinator_update( + hass: HomeAssistant, + emoncms_yaml_config: ConfigType, + snapshot: SnapshotAssertion, + emoncms_client: AsyncMock, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator update.""" + emoncms_client.async_request.return_value = { + "success": True, + "message": [get_feed(1, unit="°C")], + } + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) + await hass.async_block_till_done() + feeds = get_feeds([1]) + for entity_id in get_entity_ids(feeds): + state = hass.states.get(entity_id) + assert state == snapshot(name=entity_id) + + async def skip_time() -> None: + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + emoncms_client.async_request.return_value = { + "success": True, + "message": [get_feed(1, 
unit="°C", value=24.04, timestamp=1665509670)], + } + + await skip_time() + + for entity_id in get_entity_ids(feeds): + state = hass.states.get(entity_id) + assert state.attributes["LastUpdated"] == 1665509670 + assert state.state == "24.04" + + emoncms_client.async_request.return_value = EMONCMS_FAILURE + + await skip_time() + + assert f"Error fetching {DOMAIN}_coordinator data" in caplog.text diff --git a/tests/components/emulated_hue/test_hue_api.py b/tests/components/emulated_hue/test_hue_api.py index 4edd52b812d..28e269fdaeb 100644 --- a/tests/components/emulated_hue/test_hue_api.py +++ b/tests/components/emulated_hue/test_hue_api.py @@ -1,11 +1,14 @@ """The tests for the emulated Hue component.""" +from __future__ import annotations + import asyncio +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus from ipaddress import ip_address import json -from unittest.mock import patch +from unittest.mock import AsyncMock, _patch, patch from aiohttp.hdrs import CONTENT_TYPE from aiohttp.test_utils import TestClient @@ -56,6 +59,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util +from homeassistant.util.json import JsonObjectType from tests.common import ( async_fire_time_changed, @@ -104,14 +108,14 @@ ENTITY_IDS_BY_NUMBER = { ENTITY_NUMBERS_BY_ID = {v: k for k, v in ENTITY_IDS_BY_NUMBER.items()} -def patch_upnp(): +def patch_upnp() -> _patch[AsyncMock]: """Patch async_create_upnp_datagram_endpoint.""" return patch( "homeassistant.components.emulated_hue.async_create_upnp_datagram_endpoint" ) -async def async_get_lights(client): +async def async_get_lights(client: TestClient) -> JsonObjectType: """Get lights with the hue client.""" result = await client.get("/api/username/lights") assert result.status == HTTPStatus.OK @@ -131,7 +135,7 @@ async def _async_setup_emulated_hue(hass: HomeAssistant, conf: ConfigType) -> No @pytest.fixture -async def base_setup(hass): +async def base_setup(hass: HomeAssistant) -> None: """Set up homeassistant and http.""" await asyncio.gather( setup.async_setup_component(hass, "homeassistant", {}), @@ -142,7 +146,7 @@ async def base_setup(hass): @pytest.fixture(autouse=True) -async def wanted_platforms_only() -> None: +def wanted_platforms_only() -> Generator[None]: """Enable only the wanted demo platforms.""" with patch( "homeassistant.components.demo.COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM", @@ -159,7 +163,7 @@ async def wanted_platforms_only() -> None: @pytest.fixture -async def demo_setup(hass, wanted_platforms_only): +async def demo_setup(hass: HomeAssistant, wanted_platforms_only: None) -> None: """Fixture to setup demo platforms.""" # We need to do this to get access to homeassistant/turn_(on,off) setups = [ @@ -211,7 +215,9 @@ async def demo_setup(hass, wanted_platforms_only): @pytest.fixture -async def hass_hue(hass, base_setup, demo_setup): +async def hass_hue( + hass: HomeAssistant, base_setup: None, demo_setup: None +) -> HomeAssistant: """Set up a Home Assistant instance for these tests.""" await _async_setup_emulated_hue( hass, @@ -245,7 +251,7 @@ def _mock_hue_endpoints( @pytest.fixture async def hue_client( - hass_hue, hass_client_no_auth: ClientSessionGenerator + hass_hue: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> TestClient: """Create web client for emulated hue api.""" _mock_hue_endpoints( @@ -285,7 +291,7 @@ async def hue_client( return await 
hass_client_no_auth() -async def test_discover_lights(hass: HomeAssistant, hue_client) -> None: +async def test_discover_lights(hass: HomeAssistant, hue_client: TestClient) -> None: """Test the discovery of lights.""" result = await hue_client.get("/api/username/lights") @@ -343,7 +349,8 @@ async def test_discover_lights(hass: HomeAssistant, hue_client) -> None: assert device["state"][HUE_API_STATE_ON] is False -async def test_light_without_brightness_supported(hass_hue, hue_client) -> None: +@pytest.mark.usefixtures("hass_hue") +async def test_light_without_brightness_supported(hue_client: TestClient) -> None: """Test that light without brightness is supported.""" light_without_brightness_json = await perform_get_light_state( hue_client, "light.no_brightness", HTTPStatus.OK @@ -382,7 +389,9 @@ async def test_lights_all_dimmable( ) -async def test_light_without_brightness_can_be_turned_off(hass_hue, hue_client) -> None: +async def test_light_without_brightness_can_be_turned_off( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test that light without brightness can be turned off.""" hass_hue.states.async_set("light.no_brightness", "on", {}) turn_off_calls = [] @@ -417,7 +426,9 @@ async def test_light_without_brightness_can_be_turned_off(hass_hue, hue_client) assert "light.no_brightness" in call.data[ATTR_ENTITY_ID] -async def test_light_without_brightness_can_be_turned_on(hass_hue, hue_client) -> None: +async def test_light_without_brightness_can_be_turned_on( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test that light without brightness can be turned on.""" hass_hue.states.async_set("light.no_brightness", "off", {}) @@ -467,7 +478,9 @@ async def test_light_without_brightness_can_be_turned_on(hass_hue, hue_client) - (const.STATE_UNKNOWN, True), ], ) -async def test_reachable_for_state(hass_hue, hue_client, state, is_reachable) -> None: +async def test_reachable_for_state( + hass_hue: HomeAssistant, hue_client: TestClient, state: str, is_reachable: bool +) -> None: """Test that an entity is reported as unreachable if in unavailable state.""" entity_id = "light.ceiling_lights" @@ -478,7 +491,7 @@ async def test_reachable_for_state(hass_hue, hue_client, state, is_reachable) -> assert state_json["state"]["reachable"] == is_reachable, state_json -async def test_discover_full_state(hue_client) -> None: +async def test_discover_full_state(hue_client: TestClient) -> None: """Test the discovery of full state.""" result = await hue_client.get(f"/api/{HUE_API_USERNAME}") @@ -529,7 +542,7 @@ async def test_discover_full_state(hue_client) -> None: assert config_json["linkbutton"] is True -async def test_discover_config(hue_client) -> None: +async def test_discover_config(hue_client: TestClient) -> None: """Test the discovery of configuration.""" result = await hue_client.get(f"/api/{HUE_API_USERNAME}/config") @@ -587,7 +600,7 @@ async def test_discover_config(hue_client) -> None: assert "error" not in config_json -async def test_get_light_state(hass_hue, hue_client) -> None: +async def test_get_light_state(hass_hue: HomeAssistant, hue_client: TestClient) -> None: """Test the getting of light state.""" # Turn ceiling lights on and set to 127 brightness, and set light color await hass_hue.services.async_call( @@ -648,7 +661,9 @@ async def test_get_light_state(hass_hue, hue_client) -> None: ) -async def test_put_light_state(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_put_light_state( + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: 
TestClient +) -> None: """Test the setting of light states.""" await perform_put_test_on_ceiling_lights(hass_hue, hue_client) @@ -818,7 +833,7 @@ async def test_put_light_state(hass: HomeAssistant, hass_hue, hue_client) -> Non async def test_put_light_state_script( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test the setting of script variables.""" # Turn the kitchen light off first @@ -834,7 +849,7 @@ async def test_put_light_state_script( brightness = round(level * 254 / 100) script_result = await perform_put_light_state( - hass_hue, hue_client, "script.set_kitchen_light", True, brightness + hass_hue, hue_client, "script.set_kitchen_light", True, brightness=brightness ) script_result_json = await script_result.json() @@ -851,13 +866,15 @@ async def test_put_light_state_script( ) -async def test_put_light_state_climate_set_temperature(hass_hue, hue_client) -> None: +async def test_put_light_state_climate_set_temperature( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting climate temperature.""" brightness = 19 temperature = round(brightness / 254 * 100) hvac_result = await perform_put_light_state( - hass_hue, hue_client, "climate.hvac", True, brightness + hass_hue, hue_client, "climate.hvac", True, brightness=brightness ) hvac_result_json = await hvac_result.json() @@ -876,7 +893,9 @@ async def test_put_light_state_climate_set_temperature(hass_hue, hue_client) -> assert ecobee_result.status == HTTPStatus.UNAUTHORIZED -async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> None: +async def test_put_light_state_humidifier_set_humidity( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting humidifier target humidity.""" # Turn the humidifier off first await hass_hue.services.async_call( @@ -890,7 +909,7 @@ async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> humidity = round(brightness / 254 * 100) humidifier_result = await perform_put_light_state( - hass_hue, hue_client, "humidifier.humidifier", True, brightness + hass_hue, hue_client, "humidifier.humidifier", True, brightness=brightness ) humidifier_result_json = await humidifier_result.json() @@ -909,7 +928,9 @@ async def test_put_light_state_humidifier_set_humidity(hass_hue, hue_client) -> assert hygrostat_result.status == HTTPStatus.UNAUTHORIZED -async def test_put_light_state_media_player(hass_hue, hue_client) -> None: +async def test_put_light_state_media_player( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test turning on media player and setting volume.""" # Turn the music player off first await hass_hue.services.async_call( @@ -924,7 +945,7 @@ async def test_put_light_state_media_player(hass_hue, hue_client) -> None: brightness = round(level * 254) mp_result = await perform_put_light_state( - hass_hue, hue_client, "media_player.walkman", True, brightness + hass_hue, hue_client, "media_player.walkman", True, brightness=brightness ) mp_result_json = await mp_result.json() @@ -937,7 +958,9 @@ async def test_put_light_state_media_player(hass_hue, hue_client) -> None: assert walkman.attributes[media_player.ATTR_MEDIA_VOLUME_LEVEL] == level -async def test_open_cover_without_position(hass_hue, hue_client) -> None: +async def test_open_cover_without_position( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test opening cover .""" cover_id = "cover.living_room_window" # Close cover first @@ -1000,7 +1023,9 @@ async def 
test_open_cover_without_position(hass_hue, hue_client) -> None: assert cover_test_2.attributes.get("current_position") == 0 -async def test_set_position_cover(hass_hue, hue_client) -> None: +async def test_set_position_cover( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test setting position cover .""" cover_id = "cover.living_room_window" cover_number = ENTITY_NUMBERS_BY_ID[cover_id] @@ -1034,7 +1059,7 @@ async def test_set_position_cover(hass_hue, hue_client) -> None: # Go through the API to open cover_result = await perform_put_light_state( - hass_hue, hue_client, cover_id, False, brightness + hass_hue, hue_client, cover_id, False, brightness=brightness ) assert cover_result.status == HTTPStatus.OK @@ -1057,7 +1082,9 @@ async def test_set_position_cover(hass_hue, hue_client) -> None: assert cover_test_2.attributes.get("current_position") == level -async def test_put_light_state_fan(hass_hue, hue_client) -> None: +async def test_put_light_state_fan( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test turning on fan and setting speed.""" # Turn the fan off first await hass_hue.services.async_call( @@ -1072,7 +1099,7 @@ async def test_put_light_state_fan(hass_hue, hue_client) -> None: brightness = round(level * 254 / 100) fan_result = await perform_put_light_state( - hass_hue, hue_client, "fan.living_room_fan", True, brightness + hass_hue, hue_client, "fan.living_room_fan", True, brightness=brightness ) fan_result_json = await fan_result.json() @@ -1166,7 +1193,9 @@ async def test_put_light_state_fan(hass_hue, hue_client) -> None: assert fan_json["state"][HUE_API_STATE_BRI] == 1 -async def test_put_with_form_urlencoded_content_type(hass_hue, hue_client) -> None: +async def test_put_with_form_urlencoded_content_type( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the form with urlencoded content.""" entity_number = ENTITY_NUMBERS_BY_ID["light.ceiling_lights"] # Needed for Alexa @@ -1185,7 +1214,7 @@ async def test_put_with_form_urlencoded_content_type(hass_hue, hue_client) -> No assert result.status == HTTPStatus.BAD_REQUEST -async def test_entity_not_found(hue_client) -> None: +async def test_entity_not_found(hue_client: TestClient) -> None: """Test for entity which are not found.""" result = await hue_client.get("/api/username/lights/98") @@ -1196,7 +1225,7 @@ async def test_entity_not_found(hue_client) -> None: assert result.status == HTTPStatus.NOT_FOUND -async def test_allowed_methods(hue_client) -> None: +async def test_allowed_methods(hue_client: TestClient) -> None: """Test the allowed methods.""" result = await hue_client.get( "/api/username/lights/ENTITY_NUMBERS_BY_ID[light.ceiling_lights]/state" @@ -1215,7 +1244,7 @@ async def test_allowed_methods(hue_client) -> None: assert result.status == HTTPStatus.METHOD_NOT_ALLOWED -async def test_proper_put_state_request(hue_client) -> None: +async def test_proper_put_state_request(hue_client: TestClient) -> None: """Test the request to set the state.""" # Test proper on value parsing result = await hue_client.put( @@ -1238,7 +1267,7 @@ async def test_proper_put_state_request(hue_client) -> None: assert result.status == HTTPStatus.BAD_REQUEST -async def test_get_empty_groups_state(hue_client) -> None: +async def test_get_empty_groups_state(hue_client: TestClient) -> None: """Test the request to get groups endpoint.""" # Test proper on value parsing result = await hue_client.get("/api/username/groups") @@ -1251,7 +1280,9 @@ async def test_get_empty_groups_state(hue_client) -> 
None: async def perform_put_test_on_ceiling_lights( - hass_hue, hue_client, content_type=CONTENT_TYPE_JSON + hass_hue: HomeAssistant, + hue_client: TestClient, + content_type: str = CONTENT_TYPE_JSON, ): """Test the setting of a light.""" # Turn the office light off first @@ -1267,7 +1298,12 @@ async def perform_put_test_on_ceiling_lights( # Go through the API to turn it on office_result = await perform_put_light_state( - hass_hue, hue_client, "light.ceiling_lights", True, 56, content_type + hass_hue, + hue_client, + "light.ceiling_lights", + True, + brightness=56, + content_type=content_type, ) assert office_result.status == HTTPStatus.OK @@ -1283,7 +1319,9 @@ async def perform_put_test_on_ceiling_lights( assert ceiling_lights.attributes[light.ATTR_BRIGHTNESS] == 56 -async def perform_get_light_state_by_number(client, entity_number, expected_status): +async def perform_get_light_state_by_number( + client: TestClient, entity_number: int | str, expected_status: HTTPStatus +) -> JsonObjectType | None: """Test the getting of a light state.""" result = await client.get(f"/api/username/lights/{entity_number}") @@ -1297,7 +1335,9 @@ async def perform_get_light_state_by_number(client, entity_number, expected_stat return None -async def perform_get_light_state(client, entity_id, expected_status): +async def perform_get_light_state( + client: TestClient, entity_id: str, expected_status: HTTPStatus +) -> JsonObjectType | None: """Test the getting of a light state.""" entity_number = ENTITY_NUMBERS_BY_ID[entity_id] return await perform_get_light_state_by_number( @@ -1306,18 +1346,19 @@ async def perform_get_light_state(client, entity_id, expected_status): async def perform_put_light_state( - hass_hue, - client, - entity_id, - is_on, - brightness=None, - content_type=CONTENT_TYPE_JSON, - hue=None, - saturation=None, - color_temp=None, - with_state=True, - xy=None, - transitiontime=None, + hass_hue: HomeAssistant, + client: TestClient, + entity_id: str, + is_on: bool, + *, + brightness: int | None = None, + content_type: str = CONTENT_TYPE_JSON, + hue: int | None = None, + saturation: int | None = None, + color_temp: int | None = None, + with_state: bool = True, + xy: tuple[float, float] | None = None, + transitiontime: int | None = None, ): """Test the setting of a light state.""" req_headers = {"Content-Type": content_type} @@ -1353,7 +1394,7 @@ async def perform_put_light_state( return result -async def test_external_ip_blocked(hue_client) -> None: +async def test_external_ip_blocked(hue_client: TestClient) -> None: """Test external IP blocked.""" getUrls = [ "/api/username/groups", @@ -1391,7 +1432,7 @@ async def test_external_ip_blocked(hue_client) -> None: _remote_is_allowed.cache_clear() -async def test_unauthorized_user_blocked(hue_client) -> None: +async def test_unauthorized_user_blocked(hue_client: TestClient) -> None: """Test unauthorized_user blocked.""" getUrls = [ "/api/wronguser", @@ -1405,7 +1446,7 @@ async def test_unauthorized_user_blocked(hue_client) -> None: async def test_put_then_get_cached_properly( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test the setting of light states and an immediate readback reads the same values.""" @@ -1530,7 +1571,7 @@ async def test_put_then_get_cached_properly( async def test_put_than_get_when_service_call_fails( - hass: HomeAssistant, hass_hue, hue_client + hass: HomeAssistant, hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test putting and getting 
the light state when the service call fails.""" @@ -1581,14 +1622,17 @@ async def test_put_than_get_when_service_call_fails( assert ceiling_json["state"][HUE_API_STATE_ON] is False -async def test_get_invalid_entity(hass: HomeAssistant, hass_hue, hue_client) -> None: +@pytest.mark.usefixtures("hass_hue") +async def test_get_invalid_entity(hue_client: TestClient) -> None: """Test the setting of light states and an immediate readback reads the same values.""" # Check that we get an error with an invalid entity number. await perform_get_light_state_by_number(hue_client, 999, HTTPStatus.NOT_FOUND) -async def test_put_light_state_scene(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_put_light_state_scene( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the setting of scene variables.""" # Turn the kitchen lights off first await hass_hue.services.async_call( @@ -1630,7 +1674,9 @@ async def test_put_light_state_scene(hass: HomeAssistant, hass_hue, hue_client) assert hass_hue.states.get("light.kitchen_lights").state == STATE_OFF -async def test_only_change_contrast(hass: HomeAssistant, hass_hue, hue_client) -> None: +async def test_only_change_contrast( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test when only changing the contrast of a light state.""" # Turn the kitchen lights off first @@ -1661,7 +1707,7 @@ async def test_only_change_contrast(hass: HomeAssistant, hass_hue, hue_client) - async def test_only_change_hue_or_saturation( - hass: HomeAssistant, hass_hue, hue_client + hass_hue: HomeAssistant, hue_client: TestClient ) -> None: """Test setting either the hue or the saturation but not both.""" @@ -1700,8 +1746,9 @@ async def test_only_change_hue_or_saturation( ] == (0, 3) +@pytest.mark.usefixtures("base_setup") async def test_specificly_exposed_entities( - hass: HomeAssistant, base_setup, hass_client_no_auth: ClientSessionGenerator + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator ) -> None: """Test specific entities with expose by default off.""" conf = { @@ -1731,7 +1778,9 @@ async def test_specificly_exposed_entities( assert "1" in result_json -async def test_get_light_state_when_none(hass_hue: HomeAssistant, hue_client) -> None: +async def test_get_light_state_when_none( + hass_hue: HomeAssistant, hue_client: TestClient +) -> None: """Test the getting of light state when brightness is None.""" hass_hue.states.async_set( "light.ceiling_lights", diff --git a/tests/components/emulated_hue/test_upnp.py b/tests/components/emulated_hue/test_upnp.py index 3522f7e8047..b16fda536c6 100644 --- a/tests/components/emulated_hue/test_upnp.py +++ b/tests/components/emulated_hue/test_upnp.py @@ -1,6 +1,7 @@ """The tests for the emulated Hue component.""" from asyncio import AbstractEventLoop +from collections.abc import Generator from http import HTTPStatus import json import unittest @@ -10,7 +11,6 @@ from aiohttp import web from aiohttp.test_utils import TestClient import defusedxml.ElementTree as ET import pytest -from typing_extensions import Generator from homeassistant import setup from homeassistant.components import emulated_hue diff --git a/tests/components/energenie_power_sockets/conftest.py b/tests/components/energenie_power_sockets/conftest.py index 64eb8bbd2a8..c142e436fd3 100644 --- a/tests/components/energenie_power_sockets/conftest.py +++ b/tests/components/energenie_power_sockets/conftest.py @@ -1,11 +1,11 @@ """Configure tests for Energenie-Power-Sockets.""" +from collections.abc import Generator from 
typing import Final from unittest.mock import MagicMock, patch from pyegps.fakes.powerstrip import FakePowerStrip import pytest -from typing_extensions import Generator from homeassistant.components.energenie_power_sockets.const import ( CONF_DEVICE_API_ID, diff --git a/tests/components/energyzero/conftest.py b/tests/components/energyzero/conftest.py index 49f6c18b09e..d42283c0d4b 100644 --- a/tests/components/energyzero/conftest.py +++ b/tests/components/energyzero/conftest.py @@ -1,11 +1,11 @@ """Fixtures for EnergyZero integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from energyzero import Electricity, Gas import pytest -from typing_extensions import Generator from homeassistant.components.energyzero.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/energyzero/snapshots/test_sensor.ambr b/tests/components/energyzero/snapshots/test_sensor.ambr index da52526192e..3a66f25fd32 100644 --- a/tests/components/energyzero/snapshots/test_sensor.ambr +++ b/tests/components/energyzero/snapshots/test_sensor.ambr @@ -62,6 +62,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -137,6 +138,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -209,6 +211,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -281,6 +284,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -353,6 +357,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Energy market price', 'name_by_user': None, 'primary_config_entry': , @@ -428,6 +433,7 @@ }), 'manufacturer': 'EnergyZero', 'model': None, + 'model_id': None, 'name': 'Gas market price', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/enigma2/conftest.py b/tests/components/enigma2/conftest.py index f879fb327d7..6c024ebf66a 100644 --- a/tests/components/enigma2/conftest.py +++ b/tests/components/enigma2/conftest.py @@ -1,5 +1,7 @@ """Test the Enigma2 config flow.""" +from openwebif.api import OpenWebIfServiceEvent, OpenWebIfStatus + from homeassistant.components.enigma2.const import ( CONF_DEEP_STANDBY, CONF_MAC_ADDRESS, @@ -66,7 +68,11 @@ class MockDevice: mac_address: str | None = "12:34:56:78:90:ab" _base = "http://1.1.1.1" - async def _call_api(self, url: str) -> dict: + def __init__(self) -> None: + """Initialize the mock Enigma2 device.""" + self.status = OpenWebIfStatus(currservice=OpenWebIfServiceEvent()) + + async def _call_api(self, url: str) -> dict | None: if url.endswith("/api/about"): return { "info": { @@ -74,11 +80,14 @@ class MockDevice: { "mac": self.mac_address, } - ] + ], + "model": "Mock Enigma2", + "brand": "Enigma2", } } + return None - def get_version(self): + def get_version(self) -> str | None: """Return the version.""" return None @@ -97,5 +106,8 @@ class MockDevice: ] } + async def update(self) -> None: + """Mock update.""" + async def close(self): """Mock close.""" diff --git a/tests/components/enigma2/test_init.py b/tests/components/enigma2/test_init.py index 93a130eef54..ab19c2ce51a 100644 --- a/tests/components/enigma2/test_init.py +++ 
b/tests/components/enigma2/test_init.py @@ -15,7 +15,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" with ( patch( - "homeassistant.components.enigma2.OpenWebIfDevice.__new__", + "homeassistant.components.enigma2.coordinator.OpenWebIfDevice.__new__", return_value=MockDevice(), ), patch( diff --git a/tests/components/enphase_envoy/__init__.py b/tests/components/enphase_envoy/__init__.py index 6c6293ab76b..f69ab8e44f2 100644 --- a/tests/components/enphase_envoy/__init__.py +++ b/tests/components/enphase_envoy/__init__.py @@ -1 +1,13 @@ """Tests for the Enphase Envoy integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 5dd62419b2b..ab6e0e4f097 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -1,51 +1,60 @@ """Define test fixtures for Enphase Envoy.""" +from collections.abc import AsyncGenerator, Generator +from typing import Any from unittest.mock import AsyncMock, Mock, patch import jwt from pyenphase import ( - Envoy, EnvoyData, + EnvoyEncharge, + EnvoyEnchargeAggregate, + EnvoyEnchargePower, + EnvoyEnpower, EnvoyInverter, EnvoySystemConsumption, EnvoySystemProduction, EnvoyTokenAuth, ) -from pyenphase.const import PhaseNames, SupportedFeatures -from pyenphase.models.meters import ( - CtMeterStatus, - CtState, - CtStatusFlags, - CtType, - EnvoyMeterData, - EnvoyPhaseMode, -) +from pyenphase.const import SupportedFeatures +from pyenphase.models.dry_contacts import EnvoyDryContactSettings, EnvoyDryContactStatus +from pyenphase.models.meters import EnvoyMeterData +from pyenphase.models.tariff import EnvoyStorageSettings, EnvoyTariff import pytest from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.enphase_envoy.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry @pytest.fixture(name="config_entry") -def config_entry_fixture(hass: HomeAssistant, config, serial_number): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, str] +) -> MockConfigEntry: """Define a config entry fixture.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title=f"Envoy {serial_number}" if serial_number else "Envoy", - unique_id=serial_number, + title="Envoy 1234", + unique_id="1234", data=config, ) - entry.add_to_hass(hass) - return entry @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, str]: """Define a config entry data fixture.""" return { CONF_HOST: "1.1.1.1", @@ -55,344 +64,173 @@ def config_fixture(): } -@pytest.fixture(name="mock_envoy") -def mock_envoy_fixture( - 
serial_number, - mock_authenticate, - mock_setup, - mock_auth, -): +@pytest.fixture +async def mock_envoy( + request: pytest.FixtureRequest, +) -> AsyncGenerator[AsyncMock]: """Define a mocked Envoy fixture.""" - mock_envoy = Mock(spec=Envoy) - mock_envoy.serial_number = serial_number - mock_envoy.firmware = "7.1.2" - mock_envoy.part_number = "123456789" - mock_envoy.envoy_model = "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT" - mock_envoy.authenticate = mock_authenticate - mock_envoy.setup = mock_setup - mock_envoy.auth = mock_auth - mock_envoy.supported_features = SupportedFeatures( - SupportedFeatures.INVERTERS - | SupportedFeatures.PRODUCTION - | SupportedFeatures.PRODUCTION - | SupportedFeatures.METERING - | SupportedFeatures.THREEPHASE - | SupportedFeatures.CTMETERS - ) - mock_envoy.phase_mode = EnvoyPhaseMode.THREE - mock_envoy.phase_count = 3 - mock_envoy.active_phase_count = 3 - mock_envoy.ct_meter_count = 3 - mock_envoy.consumption_meter_type = CtType.NET_CONSUMPTION - mock_envoy.production_meter_type = CtType.PRODUCTION - mock_envoy.storage_meter_type = CtType.STORAGE - mock_envoy.data = EnvoyData( - system_consumption=EnvoySystemConsumption( - watt_hours_last_7_days=1234, - watt_hours_lifetime=1234, - watt_hours_today=1234, - watts_now=1234, - ), - system_production=EnvoySystemProduction( - watt_hours_last_7_days=1234, - watt_hours_lifetime=1234, - watt_hours_today=1234, - watts_now=1234, - ), - system_consumption_phases={ - PhaseNames.PHASE_1: EnvoySystemConsumption( - watt_hours_last_7_days=1321, - watt_hours_lifetime=1322, - watt_hours_today=1323, - watts_now=1324, - ), - PhaseNames.PHASE_2: EnvoySystemConsumption( - watt_hours_last_7_days=2321, - watt_hours_lifetime=2322, - watt_hours_today=2323, - watts_now=2324, - ), - PhaseNames.PHASE_3: EnvoySystemConsumption( - watt_hours_last_7_days=3321, - watt_hours_lifetime=3322, - watt_hours_today=3323, - watts_now=3324, - ), - }, - system_production_phases={ - PhaseNames.PHASE_1: EnvoySystemProduction( - watt_hours_last_7_days=1231, - watt_hours_lifetime=1232, - watt_hours_today=1233, - watts_now=1234, - ), - PhaseNames.PHASE_2: EnvoySystemProduction( - watt_hours_last_7_days=2231, - watt_hours_lifetime=2232, - watt_hours_today=2233, - watts_now=2234, - ), - PhaseNames.PHASE_3: EnvoySystemProduction( - watt_hours_last_7_days=3231, - watt_hours_lifetime=3232, - watt_hours_today=3233, - watts_now=3234, - ), - }, - ctmeter_production=EnvoyMeterData( - eid="100000010", - timestamp=1708006110, - energy_delivered=11234, - energy_received=12345, - active_power=100, - power_factor=0.11, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[ - CtStatusFlags.PODUCTION_IMBALANCE, - CtStatusFlags.POWER_ON_UNUSED_PHASE, - ], - ), - ctmeter_consumption=EnvoyMeterData( - eid="100000020", - timestamp=1708006120, - energy_delivered=21234, - energy_received=22345, - active_power=101, - power_factor=0.21, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - ctmeter_storage=EnvoyMeterData( - eid="100000030", - timestamp=1708006120, - energy_delivered=31234, - energy_received=32345, - active_power=103, - power_factor=0.23, - voltage=113, - current=0.4, - frequency=50.3, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - 
status_flags=[], - ), - ctmeter_production_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000011", - timestamp=1708006111, - energy_delivered=112341, - energy_received=123451, - active_power=20, - power_factor=0.12, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[CtStatusFlags.PODUCTION_IMBALANCE], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000012", - timestamp=1708006112, - energy_delivered=112342, - energy_received=123452, - active_power=30, - power_factor=0.13, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[CtStatusFlags.POWER_ON_UNUSED_PHASE], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000013", - timestamp=1708006113, - energy_delivered=112343, - energy_received=123453, - active_power=50, - power_factor=0.14, - voltage=111, - current=0.2, - frequency=50.1, - state=CtState.ENABLED, - measurement_type=CtType.PRODUCTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - ctmeter_consumption_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000021", - timestamp=1708006121, - energy_delivered=212341, - energy_received=223451, - active_power=21, - power_factor=0.22, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000022", - timestamp=1708006122, - energy_delivered=212342, - energy_received=223452, - active_power=31, - power_factor=0.23, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000023", - timestamp=1708006123, - energy_delivered=212343, - energy_received=223453, - active_power=51, - power_factor=0.24, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.NET_CONSUMPTION, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - ctmeter_storage_phases={ - PhaseNames.PHASE_1: EnvoyMeterData( - eid="100000031", - timestamp=1708006121, - energy_delivered=312341, - energy_received=323451, - active_power=22, - power_factor=0.32, - voltage=113, - current=0.4, - frequency=50.3, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_2: EnvoyMeterData( - eid="100000032", - timestamp=1708006122, - energy_delivered=312342, - energy_received=323452, - active_power=33, - power_factor=0.23, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - PhaseNames.PHASE_3: EnvoyMeterData( - eid="100000033", - timestamp=1708006123, - energy_delivered=312343, - energy_received=323453, - active_power=53, - power_factor=0.24, - voltage=112, - current=0.3, - frequency=50.2, - state=CtState.ENABLED, - measurement_type=CtType.STORAGE, - metering_status=CtMeterStatus.NORMAL, - status_flags=[], - ), - }, - inverters={ - "1": EnvoyInverter( - serial_number="1", - last_report_date=1, - last_report_watts=1, - max_report_watts=1, - ) - }, - raw={"varies_by": "firmware_version"}, - ) - mock_envoy.update = 
AsyncMock(return_value=mock_envoy.data) - - response = Mock() - response.status_code = 200 - response.text = "Testing request \nreplies." - response.headers = {"Hello": "World"} - mock_envoy.request = AsyncMock(return_value=response) - - return mock_envoy - - -@pytest.fixture(name="setup_enphase_envoy") -async def setup_enphase_envoy_fixture( - hass: HomeAssistant, - config, - mock_envoy, -): - """Define a fixture to set up Enphase Envoy.""" with ( patch( "homeassistant.components.enphase_envoy.config_flow.Envoy", - return_value=mock_envoy, - ), + autospec=True, + ) as mock_client, patch( "homeassistant.components.enphase_envoy.Envoy", - return_value=mock_envoy, + new=mock_client, ), ): - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - yield + mock_envoy = mock_client.return_value + # Add the fixtures specified + token = jwt.encode( + payload={"name": "envoy", "exp": 1907837780}, + key="secret", + algorithm="HS256", + ) + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + mock_envoy.serial_number = "1234" + mock = Mock() + mock.status_code = 200 + mock.text = "Testing request \nreplies." + mock.headers = {"Hello": "World"} + mock_envoy.request.return_value = mock + + # determine fixture file name, default envoy if no request passed + fixture_name = "envoy" + if hasattr(request, "param"): + fixture_name = request.param + + # Load envoy model from fixture + load_envoy_fixture(mock_envoy, fixture_name) + mock_envoy.update.return_value = mock_envoy.data + + yield mock_envoy -@pytest.fixture(name="mock_authenticate") -def mock_authenticate(): - """Define a mocked Envoy.authenticate fixture.""" - return AsyncMock() +def load_envoy_fixture(mock_envoy: AsyncMock, fixture_name: str) -> None: + """Load envoy model from fixture.""" - -@pytest.fixture(name="mock_auth") -def mock_auth(serial_number): - """Define a mocked EnvoyAuth fixture.""" - token = jwt.encode( - payload={"name": "envoy", "exp": 1907837780}, key="secret", algorithm="HS256" + json_fixture: dict[str, Any] = load_json_object_fixture( + f"{fixture_name}.json", DOMAIN ) - return EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial=serial_number) + + mock_envoy.firmware = json_fixture["firmware"] + mock_envoy.part_number = json_fixture["part_number"] + mock_envoy.envoy_model = json_fixture["envoy_model"] + mock_envoy.supported_features = SupportedFeatures( + json_fixture["supported_features"] + ) + mock_envoy.phase_mode = json_fixture["phase_mode"] + mock_envoy.phase_count = json_fixture["phase_count"] + mock_envoy.active_phase_count = json_fixture["active_phase_count"] + mock_envoy.ct_meter_count = json_fixture["ct_meter_count"] + mock_envoy.consumption_meter_type = json_fixture["consumption_meter_type"] + mock_envoy.production_meter_type = json_fixture["production_meter_type"] + mock_envoy.storage_meter_type = json_fixture["storage_meter_type"] + + mock_envoy.data = EnvoyData() + _load_json_2_production_data(mock_envoy.data, json_fixture) + _load_json_2_meter_data(mock_envoy.data, json_fixture) + _load_json_2_inverter_data(mock_envoy.data, json_fixture) + _load_json_2_encharge_enpower_data(mock_envoy.data, json_fixture) + _load_json_2_raw_data(mock_envoy.data, json_fixture) -@pytest.fixture(name="mock_setup") -def mock_setup(): - """Define a mocked Envoy.setup fixture.""" - return AsyncMock() +def _load_json_2_production_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy production data from fixture.""" + if item := 
json_fixture["data"].get("system_consumption"): + mocked_data.system_consumption = EnvoySystemConsumption(**item) + if item := json_fixture["data"].get("system_production"): + mocked_data.system_production = EnvoySystemProduction(**item) + if item := json_fixture["data"].get("system_consumption_phases"): + mocked_data.system_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_consumption_phases[sub_item] = EnvoySystemConsumption( + **item_data + ) + if item := json_fixture["data"].get("system_production_phases"): + mocked_data.system_production_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_production_phases[sub_item] = EnvoySystemProduction( + **item_data + ) -@pytest.fixture(name="serial_number") -def serial_number_fixture(): - """Define a serial number fixture.""" - return "1234" +def _load_json_2_meter_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy meter data from fixture.""" + if item := json_fixture["data"].get("ctmeter_production"): + mocked_data.ctmeter_production = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_consumption"): + mocked_data.ctmeter_consumption = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_storage"): + mocked_data.ctmeter_storage = EnvoyMeterData(**item) + if item := json_fixture["data"].get("ctmeter_production_phases"): + mocked_data.ctmeter_production_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_production_phases[sub_item] = EnvoyMeterData( + **item_data + ) + if item := json_fixture["data"].get("ctmeter_consumption_phases"): + mocked_data.ctmeter_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_consumption_phases[sub_item] = EnvoyMeterData( + **item_data + ) + if item := json_fixture["data"].get("ctmeter_storage_phases"): + mocked_data.ctmeter_storage_phases = {} + for sub_item, item_data in item.items(): + mocked_data.ctmeter_storage_phases[sub_item] = EnvoyMeterData(**item_data) + + +def _load_json_2_inverter_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy inverter data from fixture.""" + if item := json_fixture["data"].get("inverters"): + mocked_data.inverters = {} + for sub_item, item_data in item.items(): + mocked_data.inverters[sub_item] = EnvoyInverter(**item_data) + + +def _load_json_2_encharge_enpower_data( + mocked_data: EnvoyData, json_fixture: dict[str, Any] +) -> None: + """Fill envoy encharge/enpower data from fixture.""" + if item := json_fixture["data"].get("encharge_inventory"): + mocked_data.encharge_inventory = {} + for sub_item, item_data in item.items(): + mocked_data.encharge_inventory[sub_item] = EnvoyEncharge(**item_data) + if item := json_fixture["data"].get("enpower"): + mocked_data.enpower = EnvoyEnpower(**item) + if item := json_fixture["data"].get("encharge_aggregate"): + mocked_data.encharge_aggregate = EnvoyEnchargeAggregate(**item) + if item := json_fixture["data"].get("encharge_power"): + mocked_data.encharge_power = {} + for sub_item, item_data in item.items(): + mocked_data.encharge_power[sub_item] = EnvoyEnchargePower(**item_data) + if item := json_fixture["data"].get("tariff"): + mocked_data.tariff = EnvoyTariff(**item) + mocked_data.tariff.storage_settings = EnvoyStorageSettings( + **item["storage_settings"] + ) + if item := json_fixture["data"].get("dry_contact_status"): + mocked_data.dry_contact_status = {} + for sub_item, item_data in item.items(): + 
mocked_data.dry_contact_status[sub_item] = EnvoyDryContactStatus( + **item_data + ) + if item := json_fixture["data"].get("dry_contact_settings"): + mocked_data.dry_contact_settings = {} + for sub_item, item_data in item.items(): + mocked_data.dry_contact_settings[sub_item] = EnvoyDryContactSettings( + **item_data + ) + + +def _load_json_2_raw_data(mocked_data: EnvoyData, json_fixture: dict[str, Any]) -> None: + """Fill envoy raw data from fixture.""" + if item := json_fixture["data"].get("raw"): + mocked_data.raw = item diff --git a/tests/components/enphase_envoy/fixtures/envoy.json b/tests/components/enphase_envoy/fixtures/envoy.json new file mode 100644 index 00000000000..8c9be429931 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy.json @@ -0,0 +1,49 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy", + "supported_features": 65, + "phase_mode": null, + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 0, + "consumption_meter_type": null, + "production_meter_type": null, + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": null, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": null, + "ctmeter_consumption": null, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": null, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json new file mode 100644 index 00000000000..e72829280da --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json @@ -0,0 +1,130 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 1, phase mode: three, net-consumption CT, production CT", + "supported_features": 1231, + "phase_mode": "three", + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + 
"eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json new file mode 100644 index 00000000000..72b510e2328 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json @@ -0,0 +1,445 @@ +{ + "serial_number": "1234", + "firmware": "7.1.2", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 3, phase mode: split, net-consumption CT, production CT, storage CT", + "supported_features": 1659, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 3, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": "storage", + "data": { + "encharge_inventory": { + "123456": { + "admin_state": 6, + "admin_state_str": "ENCHG_STATE_READY", + "bmu_firmware_version": "2.1.34", + "comm_level_2_4_ghz": 4, + "comm_level_sub_ghz": 4, + "communicating": true, + "dc_switch_off": false, + "encharge_capacity": 3500, + "encharge_revision": 2, + "firmware_loaded_date": 1695330323, + "firmware_version": "2.6.5973_rel/22.11", + "installed_date": 1695330323, + "last_report_date": 1695769447, + "led_status": 17, + "max_cell_temp": 30, + "operating": true, + "part_number": "830-01760-r37", + "percent_full": 15, + "serial_number": "123456", + "temperature": 29, + "temperature_unit": "C", + "zigbee_dongle_fw_version": "100F" + } + }, + "encharge_power": { + "123456": { + "apparent_power_mva": 0, + "real_power_mw": 0, + "soc": 15 + } + }, + "encharge_aggregate": { + "available_energy": 525, + "backup_reserve": 526, + "state_of_charge": 15, + "reserve_state_of_charge": 15, + "configured_reserve_state_of_charge": 15, + "max_available_capacity": 3500 + }, + "enpower": { + "grid_mode": "multimode-ongrid", + "admin_state": 24, + "admin_state_str": "ENPWR_STATE_OPER_CLOSED", + "comm_level_2_4_ghz": 5, + "comm_level_sub_ghz": 5, + "communicating": true, + "firmware_loaded_date": 1695330323, + "firmware_version": 
"1.2.2064_release/20.34", + "installed_date": 1695330323, + "last_report_date": 1695769447, + "mains_admin_state": "closed", + "mains_oper_state": "closed", + "operating": true, + "part_number": "830-01760-r37", + "serial_number": "654321", + "temperature": 79, + "temperature_unit": "F", + "zigbee_dongle_fw_version": "1009" + }, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1322, + "watt_hours_last_7_days": 1321, + "watt_hours_today": 1323, + "watts_now": 1324 + }, + "L2": { + "watt_hours_lifetime": 2322, + "watt_hours_last_7_days": 2321, + "watt_hours_today": 2323, + "watts_now": 2324 + }, + "L3": { + "watt_hours_lifetime": 3322, + "watt_hours_last_7_days": 3321, + "watt_hours_today": 3323, + "watts_now": 3324 + } + }, + "system_production_phases": { + "L1": { + "watt_hours_lifetime": 1232, + "watt_hours_last_7_days": 1231, + "watt_hours_today": 1233, + "watts_now": 1234 + }, + "L2": { + "watt_hours_lifetime": 2232, + "watt_hours_last_7_days": 2231, + "watt_hours_today": 2233, + "watts_now": 2234 + }, + "L3": { + "watt_hours_lifetime": 3232, + "watt_hours_last_7_days": 3231, + "watt_hours_today": 3233, + "watts_now": 3234 + } + }, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": { + "eid": "100000030", + "timestamp": 1708006120, + "energy_delivered": 31234, + "energy_received": 32345, + "active_power": 103, + "power_factor": 0.23, + "voltage": 113, + "current": 0.4, + "frequency": 50.3, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + 
"measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": { + "L1": { + "eid": "100000031", + "timestamp": 1708006121, + "energy_delivered": 312341, + "energy_received": 323451, + "active_power": 22, + "power_factor": 0.32, + "voltage": 113, + "current": 0.4, + "frequency": 50.3, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000032", + "timestamp": 1708006122, + "energy_delivered": 312342, + "energy_received": 323452, + "active_power": 33, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000033", + "timestamp": 1708006123, + "energy_delivered": 312343, + "energy_received": 323453, + "active_power": 53, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "storage", + "metering_status": "normal", + "status_flags": [] + } + }, + "dry_contact_status": { + "NC1": { + "id": "NC1", + "status": "open" + }, + "NC2": { + "id": "NC2", + "status": "closed" + }, + "NC3": { + "id": "NC3", + "status": "open" + } + }, + "dry_contact_settings": { + "NC1": { + "id": "NC1", + "black_start": 5.0, + "essential_end_time": 32400.0, + "essential_start_time": 57600.0, + "generator_action": "shed", + "grid_action": "shed", + "load_name": "NC1 Fixture", + "manual_override": true, + "micro_grid_action": "shed", + "mode": "manual", + "override": true, + "priority": 1.0, + "pv_serial_nb": [], + "soc_high": 70.0, + "soc_low": 25.0, + "type": "LOAD" + }, + "NC2": { + "id": "NC2", + "black_start": 5.0, + "essential_end_time": 57600.0, + "essential_start_time": 32400.0, + "generator_action": "shed", + "grid_action": "apply", + "load_name": "NC2 Fixture", + "manual_override": true, + "micro_grid_action": "shed", + "mode": "manual", + "override": true, + "priority": 2.0, + "pv_serial_nb": [], + "soc_high": 70.0, + "soc_low": 30.0, + "type": "LOAD" + }, + "NC3": { + "id": "NC3", + "black_start": 5.0, + "essential_end_time": 57600.0, + "essential_start_time": 32400.0, + "generator_action": "apply", + "grid_action": "shed", + "load_name": "NC3 Fixture", + "manual_override": true, + "micro_grid_action": "apply", + "mode": "manual", + "override": true, + "priority": 3.0, + "pv_serial_nb": [], + 
"soc_high": 70.0, + "soc_low": 30.0, + "type": "NONE" + } + }, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json new file mode 100644 index 00000000000..f9b6ae31196 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json @@ -0,0 +1,260 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 3, phase mode: three, net-consumption CT, production CT", + "supported_features": 1743, + "phase_mode": "three", + "phase_count": 3, + "active_phase_count": 3, + "ct_meter_count": 2, + "consumption_meter_type": "net-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1322, + "watt_hours_last_7_days": 1321, + "watt_hours_today": 1323, + "watts_now": 1324 + }, + "L2": { + "watt_hours_lifetime": 2322, + "watt_hours_last_7_days": 2321, + "watt_hours_today": 2323, + "watts_now": 2324 + }, + "L3": { + "watt_hours_lifetime": 3322, + "watt_hours_last_7_days": 3321, + "watt_hours_today": 3323, + "watts_now": 3324 + } + }, + "system_production_phases": { + "L1": { + "watt_hours_lifetime": 1232, + "watt_hours_last_7_days": 1231, + "watt_hours_today": 1233, + "watts_now": 1234 + }, + "L2": { + "watt_hours_lifetime": 2232, + "watt_hours_last_7_days": 2231, + "watt_hours_today": 2233, + "watts_now": 2234 + }, + "L3": { + "watt_hours_lifetime": 3232, + "watt_hours_last_7_days": 3231, + "watt_hours_today": 3233, + "watts_now": 3234 + } + }, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, 
+ "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": { + "L1": { + "eid": "100000011", + "timestamp": 1708006111, + "energy_delivered": 112341, + "energy_received": 123451, + "active_power": 20, + "power_factor": 0.12, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance"] + }, + "L2": { + "eid": "100000012", + "timestamp": 1708006112, + "energy_delivered": 112342, + "energy_received": 123452, + "active_power": 30, + "power_factor": 0.13, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["power-on-unused-phase"] + }, + "L3": { + "eid": "100000013", + "timestamp": 1708006113, + "energy_delivered": 112343, + "energy_received": 123453, + "active_power": 50, + "power_factor": 0.14, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_consumption_phases": { + "L1": { + "eid": "100000021", + "timestamp": 1708006121, + "energy_delivered": 212341, + "energy_received": 223451, + "active_power": 21, + "power_factor": 0.22, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L2": { + "eid": "100000022", + "timestamp": 1708006122, + "energy_delivered": 212342, + "energy_received": 223452, + "active_power": 31, + "power_factor": 0.23, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "L3": { + "eid": "100000023", + "timestamp": 1708006123, + "energy_delivered": 212343, + "energy_received": 223453, + "active_power": 51, + "power_factor": 0.24, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "net-consumption", + "metering_status": "normal", + "status_flags": [] + } + }, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git 
a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json new file mode 100644 index 00000000000..ca2a976b6d1 --- /dev/null +++ b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json @@ -0,0 +1,125 @@ +{ + "serial_number": "1234", + "firmware": "7.6.175", + "part_number": "123456789", + "envoy_model": "Envoy, phases: 1, phase mode: three, total-consumption CT, production CT", + "supported_features": 1217, + "phase_mode": "three", + "phase_count": 1, + "active_phase_count": 0, + "ct_meter_count": 2, + "consumption_meter_type": "total-consumption", + "production_meter_type": "production", + "storage_meter_type": null, + "data": { + "encharge_inventory": null, + "encharge_power": null, + "encharge_aggregate": null, + "enpower": null, + "system_consumption": null, + "system_production": { + "watt_hours_lifetime": 1234, + "watt_hours_last_7_days": 1234, + "watt_hours_today": 1234, + "watts_now": 1234 + }, + "system_consumption_phases": null, + "system_production_phases": null, + "ctmeter_production": { + "eid": "100000010", + "timestamp": 1708006110, + "energy_delivered": 11234, + "energy_received": 12345, + "active_power": 100, + "power_factor": 0.11, + "voltage": 111, + "current": 0.2, + "frequency": 50.1, + "state": "enabled", + "measurement_type": "production", + "metering_status": "normal", + "status_flags": ["production-imbalance", "power-on-unused-phase"] + }, + "ctmeter_consumption": { + "eid": "100000020", + "timestamp": 1708006120, + "energy_delivered": 21234, + "energy_received": 22345, + "active_power": 101, + "power_factor": 0.21, + "voltage": 112, + "current": 0.3, + "frequency": 50.2, + "state": "enabled", + "measurement_type": "total-consumption", + "metering_status": "normal", + "status_flags": [] + }, + "ctmeter_storage": null, + "ctmeter_production_phases": null, + "ctmeter_consumption_phases": null, + "ctmeter_storage_phases": null, + "dry_contact_status": {}, + "dry_contact_settings": {}, + "inverters": { + "1": { + "serial_number": "1", + "last_report_date": 1, + "last_report_watts": 1, + "max_report_watts": 1 + } + }, + "tariff": { + "currency": { + "code": "EUR" + }, + "logger": "mylogger", + "date": "1695744220", + "storage_settings": { + "mode": "self-consumption", + "operation_mode_sub_type": "", + "reserved_soc": 15.0, + "very_low_soc": 5, + "charge_from_grid": true, + "date": "1695598084" + }, + "single_rate": { + "rate": 0.0, + "sell": 0.0 + }, + "seasons": [ + { + "id": "season_1", + "start": "1/1", + "days": [ + { + "id": "all_days", + "days": "Mon,Tue,Wed,Thu,Fri,Sat,Sun", + "must_charge_start": 444, + "must_charge_duration": 35, + "must_charge_mode": "CG", + "enable_discharge_to_grid": true, + "periods": [ + { + "id": "period_1", + "start": 480, + "rate": 0.1898 + }, + { + "id": "filler", + "start": 1320, + "rate": 0.1034 + } + ] + } + ], + "tiers": [] + } + ], + "seasons_sell": [] + }, + "raw": { + "varies_by": "firmware_version" + } + } +} diff --git a/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..84401c7566b --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_binary_sensor.ambr @@ -0,0 +1,188 @@ +# serializer version: 1 +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Communicating', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'communicating', + 'unique_id': '123456_communicating', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_communicating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Encharge 123456 Communicating', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_communicating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DC switch', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_switch', + 'unique_id': '123456_dc_switch', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.encharge_123456_dc_switch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Encharge 123456 DC switch', + }), + 'context': , + 'entity_id': 'binary_sensor.encharge_123456_dc_switch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.enpower_654321_communicating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Communicating', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'communicating', + 'unique_id': '654321_communicating', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_communicating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Enpower 654321 Communicating', + }), + 'context': , + 'entity_id': 'binary_sensor.enpower_654321_communicating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.enpower_654321_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:transmission-tower', + 'original_name': 'Grid status', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_status', + 'unique_id': '654321_mains_oper_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[envoy_metered_batt_relay][binary_sensor.enpower_654321_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Grid status', + 'icon': 'mdi:transmission-tower', + }), + 'context': , + 'entity_id': 'binary_sensor.enpower_654321_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_config_flow.ambr b/tests/components/enphase_envoy/snapshots/test_config_flow.ambr deleted file mode 100644 index b83d4e811f8..00000000000 --- a/tests/components/enphase_envoy/snapshots/test_config_flow.ambr +++ /dev/null @@ -1,10 +0,0 @@ -# serializer version: 1 -# name: test_platforms - list([ - , - , - , - , - , - ]) -# --- diff --git a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr index acaee292237..e849ab6ee43 100644 --- a/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr +++ b/tests/components/enphase_envoy/snapshots/test_diagnostics.ambr @@ -45,13 +45,14 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'model': 'Envoy', + 'model_id': None, 'name': 'Envoy <>', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.1.2', + 'sw_version': '7.6.175', }), 'entities': list([ dict({ @@ -259,3493 +260,6 @@ 'state': '0.00<>', }), }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '<>_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current power consumption', - 'icon': 
'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '<>_daily_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '<>_seven_days_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '<>_lifetime_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'state': '0.00<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 
'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l2', - 'unit_of_measurement': 'kWh', - 
}), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 
'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': 
dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '<>_lifetime_net_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'state': '0.02<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', 
- }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '<>_lifetime_net_production', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'state': '0.022345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '<>_net_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'state': '0.101', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '<>_frequency', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': 
'45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '<>_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '<>_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '<>_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ 
- 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l1', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 
'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l2', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 
'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ 
- 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l3', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '<>_production_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '<>_production_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 
'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '<>_lifetime_battery_discharged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'state': '0.03<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': 
'<>_lifetime_battery_charged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'state': '0.032345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '<>_battery_discharge', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current battery discharge', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'state': '0.103', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '<>_storage_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '<>_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 
'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '<>_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', 
- 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - 
}), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': 
None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': 
list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 
'unique_id': '<>_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), ]), }), dict({ @@ -3771,6 +285,7 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', + 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -3863,60 +378,12 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_production': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", - }), - 'ctmeter_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_storage': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_storage_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000032', 
timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -3931,61 +398,29 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', - }), - 'system_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', - }), - }), + 'system_consumption': None, + 'system_consumption_phases': None, 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', - }), - }), + 'system_production_phases': None, 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 3, - 'ct_consumption_meter': 'net-consumption', - 'ct_count': 3, - 'ct_production_meter': 'production', - 'ct_storage_meter': 'storage', - 'envoy_firmware': '7.1.2', - 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'active_phasecount': 0, + 'ct_consumption_meter': None, + 'ct_count': 0, + 'ct_production_meter': None, + 'ct_storage_meter': None, + 'envoy_firmware': '7.6.175', + 'envoy_model': 'Envoy', 'part_number': '123456789', - 'phase_count': 3, - 'phase_mode': 'three', + 'phase_count': 1, + 'phase_mode': None, 'supported_features': list([ 'INVERTERS', - 'METERING', 'PRODUCTION', - 'THREEPHASE', - 'CTMETERS', ]), }), 'fixtures': dict({ @@ -4042,13 +477,14 @@ 'labels': list([ ]), 'manufacturer': 'Enphase', - 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'model': 'Envoy', + 'model_id': None, 'name': 'Envoy <>', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', 
'serial_number': '<>', 'suggested_area': None, - 'sw_version': '7.1.2', + 'sw_version': '7.6.175', }), 'entities': list([ dict({ @@ -4256,3493 +692,6 @@ 'state': '0.00<>', }), }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '<>_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_power_consumption', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '<>_daily_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '<>_seven_days_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '<>_lifetime_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'state': '0.00<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'daily_production_phase', - 'unique_id': '<>_daily_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 
'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 
'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': 
dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power 
consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 
'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '<>_lifetime_net_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'state': '0.02<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '<>_lifetime_net_production', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'state': '0.022345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '<>_net_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 
'friendly_name': 'Envoy <> Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'state': '0.101', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '<>_frequency', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '<>_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '<>_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 
'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '<>_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': 
None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l1', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': 
'45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l2', - 
'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime 
net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l3', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '<>_production_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': 
None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '<>_production_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 
'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '<>_lifetime_battery_discharged', - 'unit_of_measurement': 'MWh', - }), - 'state': 
dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'state': '0.03<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '<>_lifetime_battery_charged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'state': '0.032345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '<>_battery_discharge', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current battery discharge', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'state': '0.103', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 
'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '<>_storage_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '<>_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '<>_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': 
list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 
'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l2', - 
'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 
'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_<>_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), ]), }), dict({ @@ -7768,6 +717,7 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', + 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -7860,60 +810,12 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_production': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", - }), - 'ctmeter_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, 
status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_storage': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_storage_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -7928,61 +830,29 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 'system_consumption': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', - }), - 'system_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', - }), - }), + 'system_consumption': None, + 'system_consumption_phases': None, 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, 
- }),
- 'L3': dict({
- '__type': "",
- 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)',
- }),
- }),
+ 'system_production_phases': None,
'tariff': None,
}),
'envoy_properties': dict({
- 'active_phasecount': 3,
- 'ct_consumption_meter': 'net-consumption',
- 'ct_count': 3,
- 'ct_production_meter': 'production',
- 'ct_storage_meter': 'storage',
- 'envoy_firmware': '7.1.2',
- 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT',
+ 'active_phasecount': 0,
+ 'ct_consumption_meter': None,
+ 'ct_count': 0,
+ 'ct_production_meter': None,
+ 'ct_storage_meter': None,
+ 'envoy_firmware': '7.6.175',
+ 'envoy_model': 'Envoy',
'part_number': '123456789',
- 'phase_count': 3,
- 'phase_mode': 'three',
+ 'phase_count': 1,
+ 'phase_mode': None,
'supported_features': list([
'INVERTERS',
- 'METERING',
'PRODUCTION',
- 'THREEPHASE',
- 'CTMETERS',
]),
}),
'fixtures': dict({
@@ -8079,13 +949,14 @@
'labels': list([
]),
'manufacturer': 'Enphase',
- 'model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT',
+ 'model': 'Envoy',
+ 'model_id': None,
'name': 'Envoy <>',
'name_by_user': None,
'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72',
'serial_number': '<>',
'suggested_area': None,
- 'sw_version': '7.1.2',
+ 'sw_version': '7.6.175',
}),
'entities': list([
dict({
@@ -8293,3493 +1164,6 @@
'state': '0.00<>',
}),
}),
- dict({
- 'entity': dict({
- 'aliases': list([
- ]),
- 'area_id': None,
- 'capabilities': dict({
- 'state_class': 'measurement',
- }),
- 'categories': dict({
- }),
- 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72',
- 'device_class': None,
- 'disabled_by': None,
- 'domain': 'sensor',
- 'entity_category': None,
- 'entity_id': 'sensor.envoy_<>_current_power_consumption',
- 'has_entity_name': True,
- 'hidden_by': None,
- 'icon': None,
- 'labels': list([
- ]),
- 'name': None,
- 'options': dict({
- 'sensor': dict({
- 'suggested_display_precision': 3,
- }),
- 'sensor.private': dict({
- 'suggested_unit_of_measurement': 'kW',
- }),
- }),
- 'original_device_class': 'power',
- 'original_icon': 'mdi:flash',
- 'original_name': 'Current power consumption',
- 'platform': 'enphase_envoy',
- 'previous_unique_id': None,
- 'supported_features': 0,
- 'translation_key': 'current_power_consumption',
- 'unique_id': '<>_consumption',
- 'unit_of_measurement': 'kW',
- }),
- 'state': dict({
- 'attributes': dict({
- 'device_class': 'power',
- 'friendly_name': 'Envoy <> Current power consumption',
- 'icon': 'mdi:flash',
- 'state_class': 'measurement',
- 'unit_of_measurement': 'kW',
- }),
- 'entity_id': 'sensor.envoy_<>_current_power_consumption',
- 'state': '1.234',
- }),
- }),
- dict({
- 'entity': dict({
- 'aliases': list([
- ]),
- 'area_id': None,
- 'capabilities': dict({
- 'state_class': 'total_increasing',
- }),
- 'categories': dict({
- }),
- 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72',
- 'device_class': None,
- 'disabled_by': None,
- 'domain': 'sensor',
- 'entity_category': None,
- 'entity_id': 'sensor.envoy_<>_energy_consumption_today',
- 'has_entity_name': True,
- 'hidden_by': None,
- 'icon': None,
- 'labels': list([
- ]),
- 'name': None,
- 'options': dict({
- 'sensor': dict({
- 'suggested_display_precision': 2,
- }),
- 'sensor.private': dict({
- 'suggested_unit_of_measurement': 'kWh',
- }),
- }),
- 'original_device_class': 'energy',
- 'original_icon': 'mdi:flash',
- 'original_name': 'Energy consumption today',
- 'platform':
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '<>_daily_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_today', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '<>_seven_days_consumption', - 'unit_of_measurement': 'kWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': 'kWh', - }), - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days', - 'state': '1.234', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '<>_lifetime_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption', - 'state': '0.00<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': 
None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 
'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l2', - 'unit_of_measurement': 
'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '<>_production_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '<>_daily_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '<>_seven_days_production_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 
'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '<>_lifetime_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l1', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l1', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l2', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 
'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '<>_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '<>_daily_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'seven_days_consumption_phase', - 'unique_id': '<>_seven_days_consumption_l3', - 'unit_of_measurement': 'kWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '<>_lifetime_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '<>_lifetime_net_consumption', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption', - 'state': '0.02<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '<>_lifetime_net_production', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> 
Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production', - 'state': '0.022345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '<>_net_consumption', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption', - 'state': '0.101', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '<>_frequency', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '<>_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': 
'45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '<>_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '<>_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - 
}), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l1', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, 
- 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l2', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '<>_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '<>_lifetime_net_consumption_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '<>_lifetime_net_production_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '<>_net_consumption_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': 
list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'frequency', - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '<>_frequency_l3', - 'unit_of_measurement': 'Hz', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '<>_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '<>_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': 
'<>_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '<>_production_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '<>_production_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '<>_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 
'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '<>_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '<>_lifetime_battery_discharged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged', - 'state': '0.03<>', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '<>_lifetime_battery_charged', - 'unit_of_measurement': 'MWh', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy <> Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': 'total_increasing', - 'unit_of_measurement': 'MWh', - }), - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged', - 'state': '0.032345', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, 
- }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '<>_battery_discharge', - 'unit_of_measurement': 'kW', - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'power', - 'friendly_name': 'Envoy <> Current battery discharge', - 'icon': 'mdi:flash', - 'state_class': 'measurement', - 'unit_of_measurement': 'kW', - }), - 'entity_id': 'sensor.envoy_<>_current_battery_discharge', - 'state': '0.103', - }), - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '<>_storage_voltage', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '<>_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '<>_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - 
}), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l1', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l1', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l1', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 
'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l2', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l2', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l2', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '<>_lifetime_battery_discharged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'total_increasing', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'MWh', - }), - }), - 'original_device_class': 'energy', - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '<>_lifetime_battery_charged_l3', - 'unit_of_measurement': 'MWh', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'kW', - }), - }), - 'original_device_class': 'power', - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '<>_battery_discharge_l3', - 'unit_of_measurement': 'kW', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 
'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'state_class': 'measurement', - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': 'V', - }), - }), - 'original_device_class': 'voltage', - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '<>_storage_voltage_l3', - 'unit_of_measurement': 'V', - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'normal', - 'not-metering', - 'check-wiring', - ]), - }), - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 'enum', - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '<>_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), - dict({ - 'entity': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': '45a36e55aaddb2007c5f6602e0c38e72', - 'device_class': None, - 'disabled_by': 'integration', - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_<>_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '<>_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - 'state': None, - }), ]), }), dict({ @@ -11805,6 +1189,7 @@ ]), 'manufacturer': 'Enphase', 'model': 'Inverter', + 'model_id': None, 'name': 'Inverter 1', 'name_by_user': None, 'primary_config_entry': '45a36e55aaddb2007c5f6602e0c38e72', @@ -11897,60 +1282,12 @@ }), ]), 'envoy_model_data': dict({ - 'ctmeter_consumption': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000020', timestamp=1708006120, energy_delivered=21234, energy_received=22345, active_power=101, power_factor=0.21, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000021', timestamp=1708006121, energy_delivered=212341, energy_received=223451, active_power=21, power_factor=0.22, voltage=112, current=0.3, 
frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000022', timestamp=1708006122, energy_delivered=212342, energy_received=223452, active_power=31, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000023', timestamp=1708006123, energy_delivered=212343, energy_received=223453, active_power=51, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_production': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000010', timestamp=1708006110, energy_delivered=11234, energy_received=12345, active_power=100, power_factor=0.11, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[, ])", - }), - 'ctmeter_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000011', timestamp=1708006111, energy_delivered=112341, energy_received=123451, active_power=20, power_factor=0.12, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000012', timestamp=1708006112, energy_delivered=112342, energy_received=123452, active_power=30, power_factor=0.13, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000013', timestamp=1708006113, energy_delivered=112343, energy_received=123453, active_power=50, power_factor=0.14, voltage=111, current=0.2, frequency=50.1, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), - 'ctmeter_storage': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000030', timestamp=1708006120, energy_delivered=31234, energy_received=32345, active_power=103, power_factor=0.23, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'ctmeter_storage_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000031', timestamp=1708006121, energy_delivered=312341, energy_received=323451, active_power=22, power_factor=0.32, voltage=113, current=0.4, frequency=50.3, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L2': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000032', timestamp=1708006122, energy_delivered=312342, energy_received=323452, active_power=33, power_factor=0.23, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - 'L3': dict({ - '__type': "", - 'repr': "EnvoyMeterData(eid='100000033', timestamp=1708006123, energy_delivered=312343, energy_received=323453, active_power=53, power_factor=0.24, voltage=112, current=0.3, frequency=50.2, state=, measurement_type=, metering_status=, status_flags=[])", - }), - }), + 'ctmeter_consumption': None, + 'ctmeter_consumption_phases': None, + 'ctmeter_production': None, + 'ctmeter_production_phases': None, + 'ctmeter_storage': None, + 'ctmeter_storage_phases': None, 'dry_contact_settings': dict({ }), 'dry_contact_status': dict({ @@ -11965,61 +1302,29 @@ 'repr': "EnvoyInverter(serial_number='1', last_report_date=1, last_report_watts=1, max_report_watts=1)", }), }), - 
'system_consumption': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', - }), - 'system_consumption_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=1322, watt_hours_last_7_days=1321, watt_hours_today=1323, watts_now=1324)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=2322, watt_hours_last_7_days=2321, watt_hours_today=2323, watts_now=2324)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemConsumption(watt_hours_lifetime=3322, watt_hours_last_7_days=3321, watt_hours_today=3323, watts_now=3324)', - }), - }), + 'system_consumption': None, + 'system_consumption_phases': None, 'system_production': dict({ '__type': "", 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1234, watt_hours_last_7_days=1234, watt_hours_today=1234, watts_now=1234)', }), - 'system_production_phases': dict({ - 'L1': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=1232, watt_hours_last_7_days=1231, watt_hours_today=1233, watts_now=1234)', - }), - 'L2': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=2232, watt_hours_last_7_days=2231, watt_hours_today=2233, watts_now=2234)', - }), - 'L3': dict({ - '__type': "", - 'repr': 'EnvoySystemProduction(watt_hours_lifetime=3232, watt_hours_last_7_days=3231, watt_hours_today=3233, watts_now=3234)', - }), - }), + 'system_production_phases': None, 'tariff': None, }), 'envoy_properties': dict({ - 'active_phasecount': 3, - 'ct_consumption_meter': 'net-consumption', - 'ct_count': 3, - 'ct_production_meter': 'production', - 'ct_storage_meter': 'storage', - 'envoy_firmware': '7.1.2', - 'envoy_model': 'Envoy, phases: 3, phase mode: three, net-consumption CT, production CT, storage CT', + 'active_phasecount': 0, + 'ct_consumption_meter': None, + 'ct_count': 0, + 'ct_production_meter': None, + 'ct_storage_meter': None, + 'envoy_firmware': '7.6.175', + 'envoy_model': 'Envoy', 'part_number': '123456789', - 'phase_count': 3, - 'phase_mode': 'three', + 'phase_count': 1, + 'phase_mode': None, 'supported_features': list([ 'INVERTERS', - 'METERING', 'PRODUCTION', - 'THREEPHASE', - 'CTMETERS', ]), }), 'fixtures': dict({ diff --git a/tests/components/enphase_envoy/snapshots/test_number.ambr b/tests/components/enphase_envoy/snapshots/test_number.ambr new file mode 100644 index 00000000000..6310911c27e --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_number.ambr @@ -0,0 +1,394 @@ +# serializer version: 1 +# name: test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.enpower_654321_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '654321_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: 
test_number[envoy_metered_batt_relay][number.enpower_654321_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Enpower 654321 Reserve battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.enpower_654321_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc1_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC1_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC1 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc1_fixture_cutoff_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc1_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC1_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc1_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC1 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc1_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc2_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC2_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC2 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc2_fixture_cutoff_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc2_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC2_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc2_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC2 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc2_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc3_fixture_cutoff_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cutoff battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cutoff_battery_level', + 'unique_id': '654321_relay_NC3_soc_low', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_cutoff_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC3 Fixture Cutoff battery level', + 'max': 100.0, + 'min': 0.0, + 
'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc3_fixture_cutoff_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.0', + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.nc3_fixture_restore_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restore battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'restore_battery_level', + 'unique_id': '654321_relay_NC3_soc_high', + 'unit_of_measurement': None, + }) +# --- +# name: test_number[envoy_metered_batt_relay][number.nc3_fixture_restore_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'NC3 Fixture Restore battery level', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.nc3_fixture_restore_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '70.0', + }) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_select.ambr b/tests/components/enphase_envoy/snapshots/test_select.ambr new file mode 100644 index 00000000000..10f15820ac4 --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_select.ambr @@ -0,0 +1,754 @@ +# serializer version: 1 +# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.enpower_654321_storage_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storage mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_mode', + 'unique_id': '654321_storage_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.enpower_654321_storage_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Storage mode', + 'options': list([ + 'backup', + 'self_consumption', + 'savings', + ]), + }), + 'context': , + 'entity_id': 'select.enpower_654321_storage_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC1_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC1_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC1_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 
'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc1_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC1_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc1_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc1_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC2_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, 
+ 'supported_features': 0, + 'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC2_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC2_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc2_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC2_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc2_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc2_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_generator_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Generator action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_generator_action', + 'unique_id': '654321_relay_NC3_generator_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_generator_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Generator action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_generator_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_grid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_grid_action', + 'unique_id': '654321_relay_NC3_grid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_grid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Grid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_grid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_microgrid_action', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Microgrid action', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_microgrid_action', + 'unique_id': '654321_relay_NC3_microgrid_action', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_microgrid_action-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Microgrid action', + 'options': list([ + 'powered', + 'not_powered', + 'schedule', + 'none', + ]), + 
}), + 'context': , + 'entity_id': 'select.nc3_fixture_microgrid_action', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'powered', + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.nc3_fixture_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Mode', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'relay_mode', + 'unique_id': '654321_relay_NC3_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[envoy_metered_batt_relay][select.nc3_fixture_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture Mode', + 'options': list([ + 'standard', + 'battery', + ]), + }), + 'context': , + 'entity_id': 'select.nc3_fixture_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'standard', + }) +# --- diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index e403886b096..dde6a6add41 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -1,3429 +1,46 @@ # serializer version: 1 -# name: test_sensor - list([ - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production', - 'unique_id': '1234_production', - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today', - 'platform': 'enphase_envoy', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': 'daily_production', - 'unique_id': '1234_daily_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production', - 'unique_id': '1234_seven_days_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production', - 'unique_id': '1234_lifetime_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption', - 'unique_id': '1234_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 2, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 
'Energy consumption today', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption', - 'unique_id': '1234_daily_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption', - 'unique_id': '1234_seven_days_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption', - 'unique_id': '1234_lifetime_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today 
l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'daily_production_phase', - 'unique_id': '1234_daily_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_production_phase', - 'unique_id': '1234_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_production_phase', - 'unique_id': '1234_daily_production_l3', - 'unit_of_measurement': , - }), - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy production last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_production_phase', - 'unique_id': '1234_seven_days_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_production_phase', - 'unique_id': '1234_lifetime_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': 
, - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, 
- 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'current_power_consumption_phase', - 'unique_id': '1234_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption today l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'daily_consumption_phase', - 'unique_id': '1234_daily_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Energy consumption last seven days l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'seven_days_consumption_phase', - 'unique_id': '1234_seven_days_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_consumption_phase', - 'unique_id': '1234_lifetime_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption', - 'unique_id': '1234_lifetime_net_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production', - 'unique_id': '1234_lifetime_net_production', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 
'sensor.envoy_1234_current_net_power_consumption', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption', - 'unique_id': '1234_net_consumption', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency', - 'unique_id': '1234_frequency', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage', - 'unique_id': '1234_voltage', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status', - 'unique_id': '1234_net_consumption_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': 
None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags', - 'unique_id': '1234_net_consumption_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l1', - 'platform': 
'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': 
'1234_lifetime_net_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 
'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_consumption_phase', - 'unique_id': '1234_lifetime_net_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime net energy production l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_net_production_phase', - 'unique_id': '1234_lifetime_net_production_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 
'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current net power consumption l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_consumption_phase', - 'unique_id': '1234_net_consumption_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Frequency net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_frequency_phase', - 'unique_id': '1234_frequency_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_voltage_phase', - 'unique_id': '1234_voltage_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_metering_status_phase', - 'unique_id': '1234_net_consumption_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active net consumption CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'net_ct_status_flags_phase', - 'unique_id': '1234_net_consumption_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status', - 'unique_id': '1234_production_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags', - 'unique_id': '1234_production_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_metering_status_phase', - 'unique_id': '1234_production_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active production CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'production_ct_status_flags_phase', - 'unique_id': '1234_production_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 
'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged', - 'unique_id': '1234_lifetime_battery_discharged', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged', - 'unique_id': '1234_lifetime_battery_charged', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge', - 'unique_id': '1234_battery_discharge', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage', - 'unique_id': '1234_storage_voltage', - 'unit_of_measurement': , - }), - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status', - 'unique_id': '1234_storage_ct_metering_status', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags', - 'unique_id': '1234_storage_ct_status_flags', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l1', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l1', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Current battery discharge l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l2', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': 
None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l2', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l2', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy discharged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_discharged_phase', - 'unique_id': '1234_lifetime_battery_discharged_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Lifetime battery energy charged l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lifetime_battery_charged_phase', - 'unique_id': '1234_lifetime_battery_charged_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 
'mdi:flash', - 'original_name': 'Current battery discharge l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'battery_discharge_phase', - 'unique_id': '1234_battery_discharge_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Voltage storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_voltage_phase', - 'unique_id': '1234_storage_voltage_l3', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Metering status storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_metering_status_phase', - 'unique_id': '1234_storage_ct_metering_status_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:flash', - 'original_name': 'Meter status flags active storage CT l3', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'storage_ct_status_flags_phase', - 'unique_id': '1234_storage_ct_status_flags_l3', - 'unit_of_measurement': None, - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': None, - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '1', - 'unit_of_measurement': , - }), - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_1_last_reported', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': 'mdi:flash', - 'original_name': 'Last reported', - 'platform': 'enphase_envoy', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_reported', - 'unique_id': '1_last_reported', - 'unit_of_measurement': None, - }), - ]) -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current battery discharge', - 'icon': 'mdi:flash', + 'area_id': None, + 'capabilities': dict({ 'state_class': , - 'unit_of_measurement': , }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_battery_discharge', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.103', + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_battery_discharge_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current net power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.101', - }) -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_net_power_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Envoy 1234 Current power consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_current_power_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_consumption_l2-state] - None -# --- -# name: 
test_sensor[sensor.envoy_1234_current_power_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production-state] +# name: test_sensor[envoy][sensor.envoy_1234_current_power_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3440,67 +57,46 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_current_power_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption last seven days', - 'icon': 'mdi:flash', - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Energy consumption today', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_energy_consumption_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_consumption_today_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days-state] +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_last_seven_days-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3516,16 +112,48 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l1-state] - None +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_last_seven_days_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today-state] +# name: test_sensor[envoy][sensor.envoy_1234_energy_production_today-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3542,106 +170,48 @@ 'state': '1.234', }) # --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_energy_production_today_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_frequency_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy charged', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.032345', + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] - None -# --- -# name: 
test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.031234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.001234', - }) -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production-state] +# name: test_sensor[envoy][sensor.envoy_1234_lifetime_energy_production-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'energy', @@ -3658,164 +228,42 @@ 'state': '0.001234', }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_energy_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , +# name: test_sensor[envoy][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.021234', + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , }) # --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] - None -# --- -# name: 
test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Envoy 1234 Lifetime net energy production', - 'icon': 'mdi:flash', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.022345', - }) -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_lifetime_net_energy_production_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_production_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_metering_status_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l2-state] - None -# --- -# 
name: test_sensor[sensor.envoy_1234_voltage_net_consumption_ct_l3-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l1-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l2-state] - None -# --- -# name: test_sensor[sensor.envoy_1234_voltage_storage_ct_l3-state] - None -# --- -# name: test_sensor[sensor.inverter_1-state] +# name: test_sensor[envoy][sensor.inverter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'device_class': 'power', @@ -3832,6 +280,11330 @@ 'state': '1', }) # --- -# name: test_sensor[sensor.inverter_1_last_reported-state] - None +# name: test_sensor[envoy][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption 
last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': 
None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_1p_metered][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Apparent power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_apparent_power_mva', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_apparent_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'apparent_power', + 'friendly_name': 'Encharge 123456 Apparent power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_apparent_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Encharge 123456 Battery', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_last_reported', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '123456_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Encharge 123456 Last reported', + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-09-26T23:04:07+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_real_power_mw', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Encharge 123456 Power', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.encharge_123456_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.encharge_123456_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Encharge 123456 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.encharge_123456_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.enpower_654321_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '654321_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Enpower 654321 Last reported', + }), + 'context': , + 'entity_id': 'sensor.enpower_654321_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-09-26T23:04:07+00:00', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.enpower_654321_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.enpower_654321_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Enpower 654321 Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.enpower_654321_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Available battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'available_energy', + 'unique_id': '1234_available_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_available_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Available battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_available_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '525', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Battery', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Battery capacity', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_capacity', + 'unique_id': '1234_max_capacity', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery_capacity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Battery capacity', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_battery_capacity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3500', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge', + 'unique_id': '1234_battery_discharge', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge', + 
'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.103', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.033', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current battery discharge l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_discharge_phase', + 'unique_id': '1234_battery_discharge_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_battery_discharge_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current battery discharge l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_battery_discharge_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.053', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption', + 'unique_id': '1234_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.101', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net 
power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_consumption_phase', + 'unique_id': '1234_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current net power consumption 
l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.051', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption', + 'unique_id': '1234_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_consumption_phase', + 'unique_id': '1234_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.324', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power 
production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production_phase', + 'unique_id': '1234_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_current_power_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption', + 'unique_id': '1234_seven_days_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption last seven days l3', + 'platform': 
'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_consumption_phase', + 'unique_id': '1234_seven_days_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption', + 'unique_id': '1234_daily_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 
'entity_id': 'sensor.envoy_1234_energy_consumption_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy consumption today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_consumption_phase', + 'unique_id': '1234_daily_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_consumption_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy consumption today l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_consumption_today_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.323', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l1', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l2', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production_phase', + 'unique_id': '1234_seven_days_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_last_seven_days_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days l3', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.231', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.233', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.233', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_energy_production_today_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production_phase', + 'unique_id': '1234_daily_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_energy_production_today_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.233', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency', + 'unique_id': '1234_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_frequency_phase', + 'unique_id': '1234_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged', + 'unique_id': '1234_lifetime_battery_charged', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.032345', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.323451', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.323452', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy charged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_charged_phase', + 'unique_id': '1234_lifetime_battery_charged_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy charged l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_charged_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.323453', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged', + 'unique_id': 
'1234_lifetime_battery_discharged', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.031234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l2', + 'icon': 'mdi:flash', + 'state_class': 
, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312342', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime battery energy discharged l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_battery_discharged_phase', + 'unique_id': '1234_lifetime_battery_discharged_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_discharged_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime battery energy discharged l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_battery_energy_discharged_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.312343', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption', + 'unique_id': '1234_lifetime_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + 
}), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_consumption_phase', + 'unique_id': '1234_lifetime_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003322', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001232', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.002232', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production_phase', + 'unique_id': '1234_lifetime_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.003232', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption', + 'unique_id': '1234_lifetime_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.021234', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212342', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_consumption_phase', + 'unique_id': '1234_lifetime_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.212343', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production', + 'unique_id': '1234_lifetime_net_production', + 
'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 
Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags', + 'unique_id': '1234_storage_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'storage_ct_status_flags_phase', + 'unique_id': '1234_storage_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_meter_status_flags_active_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active storage CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status', + 'unique_id': '1234_storage_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_metering_status_phase', + 'unique_id': '1234_storage_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_metering_status_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status storage CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , 
+ , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery energy', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_energy', + 'unique_id': '1234_reserve_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Reserve battery energy', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '526', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Reserve battery level', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reserve_soc', + 'unique_id': '1234_reserve_soc', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Envoy 1234 Reserve battery level', + 'icon': 'mdi:flash', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_reserve_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net 
consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage', + 'unique_id': '1234_storage_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '113', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '113', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage storage CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_voltage_phase', + 'unique_id': '1234_storage_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: 
Syrupy snapshot entries for the enphase_envoy sensor tests, added as paired EntityRegistryEntrySnapshot ("-entry") and StateSnapshot ("-state") blocks. All entries share the same boilerplate: platform 'enphase_envoy', original_icon 'mdi:flash', has_entity_name True, supported_features 0; the metered Envoy entries also carry a suggested_display_precision of 3 (power and lifetime energy), 2 (daily energy) or 1 (seven-day energy and frequency). The entities covered in this part of the snapshot file, with their snapshot states:

test_sensor[envoy_metered_batt_relay]
  sensor.envoy_1234_voltage_storage_ct_l3                              '112'
  sensor.inverter_1 (unique_id '1')                                    '1'
  sensor.inverter_1_last_reported (unique_id '1_last_reported')        '1970-01-01T00:00:01+00:00'

test_sensor[envoy_nobatt_metered_3p]
  sensor.envoy_1234_current_net_power_consumption ('1234_net_consumption')             '0.101'; l1/l2/l3 '0.021' / '0.031' / '0.051'
  sensor.envoy_1234_current_power_consumption ('1234_consumption')                     '1.234'; l1/l2/l3 '1.324' / '2.324' / '3.324'
  sensor.envoy_1234_current_power_production ('1234_production')                       '1.234'; l1/l2/l3 '1.234' / '2.234' / '3.234'
  sensor.envoy_1234_energy_consumption_last_seven_days ('1234_seven_days_consumption') '1.234'; l1/l2/l3 '1.321' / '2.321' / '3.321'
  sensor.envoy_1234_energy_consumption_today ('1234_daily_consumption')                '1.234'; l1/l2/l3 '1.323' / '2.323' / '3.323'
  sensor.envoy_1234_energy_production_last_seven_days ('1234_seven_days_production')   '1.234'; l1/l2/l3 '1.231' / '2.231' / '3.231'
  sensor.envoy_1234_energy_production_today ('1234_daily_production')                  '1.234'; l1/l2/l3 '1.233' / '2.233' / '3.233'
  sensor.envoy_1234_frequency_net_consumption_ct ('1234_frequency')                    '50.2'; l1/l2/l3 all '50.2'
  sensor.envoy_1234_lifetime_energy_consumption ('1234_lifetime_consumption')          '0.001234'; l1/l2/l3 '0.001322' / '0.002322' / '0.003322'
  sensor.envoy_1234_lifetime_energy_production ('1234_lifetime_production')            '0.001234'; l1/l2/l3 '0.001232' / '0.002232' / '0.003232'
  sensor.envoy_1234_lifetime_net_energy_consumption ('1234_lifetime_net_consumption')  '0.021234'; l1/l2/l3 '0.212341' / '0.212342' / '0.212343'

The paired -entry and -state snapshots for sensor.envoy_1234_lifetime_net_energy_production ('1234_lifetime_net_production') follow:
'last_updated': , + 'state': '0.022345', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223451', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223452', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime net energy production l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_net_production_phase', + 'unique_id': '1234_lifetime_net_production_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_net_energy_production_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime net energy production l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_net_energy_production_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.223453', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags', + 'unique_id': '1234_net_consumption_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_status_flags_phase', + 'unique_id': '1234_net_consumption_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active net consumption CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l1', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l2-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l2', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags_phase', + 'unique_id': '1234_production_ct_status_flags_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_meter_status_flags_active_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT l3', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status', + 'unique_id': '1234_net_consumption_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_metering_status_phase', + 'unique_id': '1234_net_consumption_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- 
+# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status net consumption CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l1', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l2', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status_phase', + 'unique_id': '1234_production_ct_metering_status_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_metering_status_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT l3', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT', + 'platform': 'enphase_envoy', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage', + 'unique_id': '1234_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.envoy_1234_voltage_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_voltage_phase', + 'unique_id': '1234_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '112', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 
'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Current power production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power_production', + 'unique_id': '1234_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 Current power production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_current_power_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production last seven days', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'seven_days_production', + 'unique_id': '1234_seven_days_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_last_seven_days-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production last seven days', + 'icon': 'mdi:flash', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_last_seven_days', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: 
test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Energy production today', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'daily_production', + 'unique_id': '1234_daily_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_energy_production_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Energy production today', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_energy_production_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime energy production', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_production', + 'unique_id': '1234_lifetime_production', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime energy production', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_energy_production', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.001234', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
None, + 'original_icon': 'mdi:flash', + 'original_name': 'Meter status flags active production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_status_flags', + 'unique_id': '1234_production_ct_status_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_meter_status_flags_active_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Envoy 1234 Meter status flags active production CT', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_meter_status_flags_active_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Metering status production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_metering_status', + 'unique_id': '1234_production_ct_metering_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_metering_status_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Envoy 1234 Metering status production CT', + 'icon': 'mdi:flash', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_metering_status_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'normal', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_last_reported', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Last reported', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_reported', + 'unique_id': '1_last_reported', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1_last_reported-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Inverter 1 Last reported', + 'icon': 'mdi:flash', + }), + 'context': , + 'entity_id': 'sensor.inverter_1_last_reported', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:01+00:00', + }) # --- diff --git a/tests/components/enphase_envoy/snapshots/test_switch.ambr b/tests/components/enphase_envoy/snapshots/test_switch.ambr new file mode 100644 index 00000000000..a5dafd735b5 --- /dev/null +++ b/tests/components/enphase_envoy/snapshots/test_switch.ambr @@ -0,0 +1,231 @@ +# serializer version: 1 +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.enpower_654321_charge_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge from grid', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_from_grid', + 'unique_id': '654321_charge_from_grid', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_charge_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Charge from grid', + }), + 'context': , + 'entity_id': 'switch.enpower_654321_charge_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.enpower_654321_grid_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid enabled', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_enabled', + 'unique_id': '654321_mains_admin_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.enpower_654321_grid_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Enpower 654321 Grid 
enabled', + }), + 'context': , + 'entity_id': 'switch.enpower_654321_grid_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc1_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC1_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc1_fixture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC1 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc1_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc2_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC2_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc2_fixture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC2 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc2_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.nc3_fixture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '654321_relay_NC3_relay_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch[envoy_metered_batt_relay][switch.nc3_fixture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'NC3 Fixture', + }), + 'context': , + 'entity_id': 'switch.nc3_fixture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/enphase_envoy/test_binary_sensor.py b/tests/components/enphase_envoy/test_binary_sensor.py new file mode 100644 
index 00000000000..883df4be6fc --- /dev/null +++ b/tests/components/enphase_envoy/test_binary_sensor.py @@ -0,0 +1,89 @@ +"""Test Enphase Envoy binary sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.const import STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor platform entities against snapshot.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_binary_sensor( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor platform entities are not created.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_binary_sensor_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test binary sensor entity values and names.""" + with patch( + "homeassistant.components.enphase_envoy.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + entity_base = f"{Platform.BINARY_SENSOR}.enpower" + + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) + assert entity_state.state == STATE_ON + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_grid_status")) + assert entity_state.state == STATE_ON + + entity_base = f"{Platform.BINARY_SENSOR}.encharge" + + for sn in mock_envoy.data.encharge_inventory: + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_communicating")) + assert entity_state.state == STATE_ON + assert (entity_state := hass.states.get(f"{entity_base}_{sn}_dc_switch")) + assert entity_state.state == STATE_ON diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index b60b03e5df9..c2cc02fcc7c 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -6,178 +6,144 @@ from unittest.mock import AsyncMock from pyenphase import EnvoyAuthenticationError, EnvoyError import pytest -from syrupy.assertion import SnapshotAssertion -from homeassistant import config_entries from homeassistant.components import zeroconf from 
homeassistant.components.enphase_envoy.const import ( DOMAIN, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, - PLATFORMS, +) +from homeassistant.config_entries import ( + SOURCE_REAUTH, + SOURCE_RECONFIGURE, + SOURCE_USER, + SOURCE_ZEROCONF, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . import setup_integration + from tests.common import MockConfigEntry _LOGGER = logging.getLogger(__name__) -async def test_form(hass: HomeAssistant, config, setup_enphase_envoy) -> None: +async def test_form( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy 1234" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - "username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 1234" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize("serial_number", [None]) async def test_user_no_serial_number( - hass: HomeAssistant, config, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test user setup without a serial number.""" + mock_envoy.serial_number = None result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy", - "username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize("serial_number", [None]) -async def test_user_fetching_serial_fails( - hass: HomeAssistant, setup_enphase_envoy +async def test_form_invalid_auth( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: - """Test user setup without a serial number.""" - result = 
await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {} - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", - }, - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy", - "username": "test-username", - "password": "test-password", - } - - -@pytest.mark.parametrize( - "mock_authenticate", - [ - AsyncMock(side_effect=EnvoyAuthenticationError("test")), - ], -) -async def test_form_invalid_auth(hass: HomeAssistant, setup_enphase_envoy) -> None: """Test we handle invalid auth.""" + mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( + "fail authentication" + ) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "invalid_auth"} @pytest.mark.parametrize( - "mock_setup", - [AsyncMock(side_effect=EnvoyError)], + ("exception", "error"), + [ + (EnvoyError, "cannot_connect"), + (ValueError, "unknown"), + ], ) -async def test_form_cannot_connect(hass: HomeAssistant, setup_enphase_envoy) -> None: +async def test_form_cannot_connect( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, + exception: Exception, + error: str, +) -> None: """Test we handle cannot connect error.""" + mock_envoy.setup.side_effect = exception result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -@pytest.mark.parametrize( - "mock_setup", - [AsyncMock(side_effect=ValueError)], -) -async def test_form_unknown_error(hass: HomeAssistant, setup_enphase_envoy) -> None: - """Test we handle unknown error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", - }, - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} def _get_schema_default(schema, key_name): @@ -189,12 +155,14 @@ def _get_schema_default(schema, key_name): async def test_zeroconf_pre_token_firmware( - hass: HomeAssistant, 
setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -208,35 +176,38 @@ async def test_zeroconf_pre_token_firmware( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, "username") == "installer" + assert ( + _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "installer" + ) - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Envoy 1234" - assert result2["result"].unique_id == "1234" - assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - "username": "test-username", - "password": "test-password", + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 1234" + assert result["result"].unique_id == "1234" + assert result["data"] == { + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } async def test_zeroconf_token_firmware( - hass: HomeAssistant, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can setup from zeroconf.""" result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -249,102 +220,101 @@ async def test_zeroconf_token_firmware( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert _get_schema_default(result["data_schema"].schema, "username") == "" + assert _get_schema_default(result["data_schema"].schema, CONF_USERNAME) == "" result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Envoy 1234" assert result2["result"].unique_id == "1234" assert result2["data"] == { - "host": "1.1.1.1", - "name": "Envoy 1234", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", } -@pytest.mark.parametrize( - "mock_authenticate", - [ - AsyncMock( - side_effect=[ - None, - EnvoyAuthenticationError("fail authentication"), - None, - ] - ), - ], -) async def test_form_host_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test changing credentials 
for existing host.""" + config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} # existing config - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" + + mock_envoy.authenticate.side_effect = EnvoyAuthenticationError( + "fail authentication" + ) # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "wrong-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "wrong-password", }, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + + mock_envoy.authenticate.side_effect = None # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" # mock successful authentication and update of credentials - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "changed-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "changed-password", }, ) await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" # updated config with new ip and changed pw - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "changed-password" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "changed-password" async def test_zeroconf_serial_already_exists( hass: HomeAssistant, - config_entry, - setup_enphase_envoy, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, caplog: pytest.LogCaptureFixture, ) -> None: """Test serial number already exists from zeroconf.""" _LOGGER.setLevel(logging.DEBUG) + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -355,21 +325,24 @@ async def test_zeroconf_serial_already_exists( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert 
result["reason"] == "already_configured" - assert config_entry.data["host"] == "4.4.4.4" + assert config_entry.data[CONF_HOST] == "4.4.4.4" assert "Zeroconf ip 4 processing 4.4.4.4, current hosts: {'1.1.1.1'}" in caplog.text async def test_zeroconf_serial_already_exists_ignores_ipv6( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test serial number already exists from zeroconf but the discovery is ipv6.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], @@ -384,17 +357,21 @@ async def test_zeroconf_serial_already_exists_ignores_ipv6( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "not_ipv4_address" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" -@pytest.mark.parametrize("serial_number", [None]) async def test_zeroconf_host_already_exists( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test hosts already exists from zeroconf.""" + mock_envoy.serial_number = None + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -405,7 +382,6 @@ async def test_zeroconf_host_already_exists( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" @@ -414,17 +390,21 @@ async def test_zeroconf_host_already_exists( async def test_zero_conf_while_form( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test zeroconf while form is active.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -435,26 +415,29 @@ async def test_zero_conf_while_form( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" async def test_zero_conf_second_envoy_while_form( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test zeroconf while form is active.""" + await 
setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result2 = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("4.4.4.4"), ip_addresses=[ip_address("4.4.4.4")], @@ -465,50 +448,51 @@ async def test_zero_conf_second_envoy_while_form( type="mock_type", ), ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert config_entry.data["host"] == "1.1.1.1" + assert result["type"] is FlowResultType.FORM + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" - result3 = await hass.config_entries.flow.async_configure( + result2 = await hass.config_entries.flow.async_configure( result2["flow_id"], { - "host": "4.4.4.4", - "username": "test-username", - "password": "test-password", + CONF_HOST: "4.4.4.4", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Envoy 4321" - assert result3["result"].unique_id == "4321" + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Envoy 4321" + assert result2["result"].unique_id == "4321" result4 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result4["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial_property( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf properties.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM with pytest.raises(KeyError) as ex: await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -521,30 +505,33 @@ async def test_zero_conf_malformed_serial_property( ) assert "serialnum" in str(ex.value) - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT + assert result["type"] is FlowResultType.ABORT async def test_zero_conf_malformed_serial( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf properties.""" + await 
setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - result2 = await hass.config_entries.flow.async_init( + result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -555,34 +542,36 @@ async def test_zero_conf_malformed_serial( type="mock_type", ), ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM + assert result["type"] is FlowResultType.FORM - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result = await hass.config_entries.flow.async_configure( + result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Envoy 12%4" + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Envoy 12%4" async def test_zero_conf_malformed_fw_property( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test malformed zeroconf property.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1")], @@ -593,25 +582,26 @@ async def test_zero_conf_malformed_fw_property( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - assert config_entry.data["host"] == "1.1.1.1" + assert config_entry.data[CONF_HOST] == "1.1.1.1" assert config_entry.unique_id == "1234" assert config_entry.title == "Envoy 1234" async def test_zero_conf_old_blank_entry( - hass: HomeAssistant, setup_enphase_envoy + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test re-using old blank entry.""" entry = MockConfigEntry( domain=DOMAIN, data={ - "host": "1.1.1.1", - "username": "", - "password": "", - "name": "unknown", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "", + CONF_PASSWORD: "", + CONF_NAME: "unknown", }, unique_id=None, title="Envoy", @@ -619,7 +609,7 @@ async def test_zero_conf_old_blank_entry( entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_ZEROCONF}, + context={"source": SOURCE_ZEROCONF}, data=zeroconf.ZeroconfServiceInfo( ip_address=ip_address("1.1.1.1"), ip_addresses=[ip_address("1.1.1.1"), ip_address("1.1.1.2")], @@ -630,21 +620,26 @@ async def test_zero_conf_old_blank_entry( type="mock_type", ), ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == 
"already_configured" - assert entry.data["host"] == "1.1.1.1" + assert entry.data[CONF_HOST] == "1.1.1.1" assert entry.unique_id == "1234" assert entry.title == "Envoy 1234" -async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> None: +async def test_reauth( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, +) -> None: """Test we reauth auth.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_REAUTH, + "source": SOURCE_REAUTH, "unique_id": config_entry.unique_id, "entry_id": config_entry.entry_id, }, @@ -652,19 +647,22 @@ async def test_reauth(hass: HomeAssistant, config_entry, setup_enphase_envoy) -> result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - "username": "test-username", - "password": "test-password", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" async def test_options_default( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can configure options.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -682,9 +680,13 @@ async def test_options_default( async def test_options_set( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can configure options.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "init" @@ -697,13 +699,17 @@ async def test_options_set( async def test_reconfigure( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we can reconfiger the entry.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -712,36 +718,40 @@ async def test_reconfigure( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username2", - "password": "test-password2", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username2", + CONF_PASSWORD: "test-password2", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] 
is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # changed entry - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username2" - assert config_entry.data["password"] == "test-password2" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username2" + assert config_entry.data[CONF_PASSWORD] == "test-password2" async def test_reconfigure_nochange( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test we get the reconfigure form and apply nochange.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -750,36 +760,40 @@ async def test_reconfigure_nochange( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "test-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", }, ) await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # unchanged original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" async def test_reconfigure_otherenvoy( - hass: HomeAssistant, config_entry, setup_enphase_envoy, mock_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test entering ip of other envoy and prevent changing it based on serial.""" + await setup_integration(hass, config_entry) result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -790,67 +804,67 @@ async def test_reconfigure_otherenvoy( # let mock return different serial from first time, sim it's other one on changed ip mock_envoy.serial_number = "45678" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "new-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "new-password", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unexpected_envoy"} + assert result["type"] is FlowResultType.FORM + 
assert result["errors"] == {"base": "unexpected_envoy"} # entry should still be original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" # set serial back to original to finsich flow mock_envoy.serial_number = "1234" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], + result = await hass.config_entries.flow.async_configure( + result["flow_id"], { - "host": "1.1.1.1", - "username": "test-username", - "password": "new-password", + CONF_HOST: "1.1.1.1", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "new-password", }, ) - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # updated original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "new-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "new-password" @pytest.mark.parametrize( - "mock_authenticate", + ("exception", "error"), [ - AsyncMock( - side_effect=[ - None, - EnvoyAuthenticationError("fail authentication"), - EnvoyError("cannot_connect"), - Exception("Unexpected exception"), - None, - ] - ), + (EnvoyAuthenticationError("fail authentication"), "invalid_auth"), + (EnvoyError, "cannot_connect"), + (Exception, "unknown"), ], ) async def test_reconfigure_auth_failure( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, + exception: Exception, + error: str, ) -> None: """Test changing credentials for existing host with auth failure.""" + await setup_integration(hass, config_entry) + result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -858,84 +872,51 @@ async def test_reconfigure_auth_failure( assert result["errors"] == {} # existing config - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" + + mock_envoy.authenticate.side_effect = exception # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "wrong-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "wrong-password", }, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_auth"} - - # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" - - # mock 
failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.2", - "username": "new-username", - "password": "wrong-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" - - # mock failing authentication on first try - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "host": "1.1.1.2", - "username": "other-username", - "password": "test-password", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} - - # still original config after failure - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + mock_envoy.authenticate.side_effect = None # mock successful authentication and update of credentials - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "changed-password", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "changed-password", }, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert result3["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # updated config with new ip and changed pw - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "changed-password" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "changed-password" async def test_reconfigure_change_ip_to_existing( - hass: HomeAssistant, config_entry, setup_enphase_envoy + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, + mock_envoy: AsyncMock, ) -> None: """Test reconfiguration to existing entry with same ip does not harm existing one.""" + await setup_integration(hass, config_entry) other_entry = MockConfigEntry( domain=DOMAIN, entry_id="65432155aaddb2007c5f6602e0c38e72", @@ -951,14 +932,14 @@ async def test_reconfigure_change_ip_to_existing( other_entry.add_to_hass(hass) # original other entry - assert other_entry.data["host"] == "1.1.1.2" - assert other_entry.data["username"] == "other-username" - assert other_entry.data["password"] == "other-password" + assert other_entry.data[CONF_HOST] == "1.1.1.2" + assert other_entry.data[CONF_USERNAME] == "other-username" + assert other_entry.data[CONF_PASSWORD] == "other-password" result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": config_entry.entry_id, }, ) @@ -967,33 +948,27 @@ async def test_reconfigure_change_ip_to_existing( assert result["errors"] == {} # original entry - assert config_entry.data["host"] == "1.1.1.1" - assert config_entry.data["username"] == "test-username" - assert 
config_entry.data["password"] == "test-password" + assert config_entry.data[CONF_HOST] == "1.1.1.1" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], { - "host": "1.1.1.2", - "username": "test-username", - "password": "test-password2", + CONF_HOST: "1.1.1.2", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password2", }, ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.ABORT - assert result2["reason"] == "reconfigure_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" # updated entry - assert config_entry.data["host"] == "1.1.1.2" - assert config_entry.data["username"] == "test-username" - assert config_entry.data["password"] == "test-password2" + assert config_entry.data[CONF_HOST] == "1.1.1.2" + assert config_entry.data[CONF_USERNAME] == "test-username" + assert config_entry.data[CONF_PASSWORD] == "test-password2" # unchanged other entry - assert other_entry.data["host"] == "1.1.1.2" - assert other_entry.data["username"] == "other-username" - assert other_entry.data["password"] == "other-password" - - -async def test_platforms(snapshot: SnapshotAssertion) -> None: - """Test if platform list changed and requires more tests.""" - assert snapshot == PLATFORMS + assert other_entry.data[CONF_HOST] == "1.1.1.2" + assert other_entry.data[CONF_USERNAME] == "other-username" + assert other_entry.data[CONF_PASSWORD] == "other-password" diff --git a/tests/components/enphase_envoy/test_diagnostics.py b/tests/components/enphase_envoy/test_diagnostics.py index 9ee6b7905e7..186ee5c46f3 100644 --- a/tests/components/enphase_envoy/test_diagnostics.py +++ b/tests/components/enphase_envoy/test_diagnostics.py @@ -1,6 +1,6 @@ """Test Enphase Envoy diagnostics.""" -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock from pyenphase.exceptions import EnvoyError import pytest @@ -10,9 +10,9 @@ from homeassistant.components.enphase_envoy.const import ( DOMAIN, OPTION_DIAGNOSTICS_INCLUDE_FIXTURES, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component + +from . 
import setup_integration from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -26,6 +26,8 @@ TO_EXCLUDE = { "last_updated", "last_changed", "last_reported", + "created_at", + "modified_at", } @@ -36,85 +38,55 @@ def limit_diagnostic_attrs(prop, path) -> bool: async def test_entry_diagnostics( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_enphase_envoy, + mock_envoy: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" + await setup_integration(hass, config_entry) assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) == snapshot(exclude=limit_diagnostic_attrs) @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture(hass: HomeAssistant, config, serial_number): +def config_entry_options_fixture(hass: HomeAssistant, config: dict[str, str]): """Define a config entry fixture.""" - entry = MockConfigEntry( + return MockConfigEntry( domain=DOMAIN, entry_id="45a36e55aaddb2007c5f6602e0c38e72", - title=f"Envoy {serial_number}" if serial_number else "Envoy", - unique_id=serial_number, + title="Envoy 1234", + unique_id="1234", data=config, options={OPTION_DIAGNOSTICS_INCLUDE_FIXTURES: True}, ) - entry.add_to_hass(hass) - return entry async def test_entry_diagnostics_with_fixtures( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_options: ConfigEntry, - setup_enphase_envoy, + config_entry_options: MockConfigEntry, + mock_envoy: AsyncMock, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" + await setup_integration(hass, config_entry_options) assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry_options ) == snapshot(exclude=limit_diagnostic_attrs) -@pytest.fixture(name="setup_enphase_envoy_options_error") -async def setup_enphase_envoy_options_error_fixture( - hass: HomeAssistant, - config, - mock_envoy_options_error, -): - """Define a fixture to set up Enphase Envoy.""" - with ( - patch( - "homeassistant.components.enphase_envoy.config_flow.Envoy", - return_value=mock_envoy_options_error, - ), - patch( - "homeassistant.components.enphase_envoy.Envoy", - return_value=mock_envoy_options_error, - ), - ): - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - yield - - -@pytest.fixture(name="mock_envoy_options_error") -def mock_envoy_options_fixture( - mock_envoy, -): - """Mock envoy with error in request.""" - mock_envoy_options = mock_envoy - mock_envoy_options.request.side_effect = AsyncMock(side_effect=EnvoyError("Test")) - return mock_envoy_options - - async def test_entry_diagnostics_with_fixtures_with_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_options: ConfigEntry, - setup_enphase_envoy_options_error, + config_entry_options: MockConfigEntry, snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, ) -> None: """Test config entry diagnostics.""" + await setup_integration(hass, config_entry_options) + mock_envoy.request.side_effect = EnvoyError("Test") assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry_options ) == snapshot(exclude=limit_diagnostic_attrs) diff --git a/tests/components/enphase_envoy/test_number.py b/tests/components/enphase_envoy/test_number.py new file mode 100644 index 00000000000..dac51ed5e26 --- /dev/null +++ b/tests/components/enphase_envoy/test_number.py @@ -0,0 +1,153 @@ 
+"""Test Enphase Envoy number sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_number( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test number platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_number( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test number platform entities are not created.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_number_operation_storage( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy number storage entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.NUMBER}.enpower_{sn}_reserve_battery_level" + + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.tariff.storage_settings.reserved_soc == float( + entity_state.state + ) + test_value = 30.0 + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.set_reserve_soc.assert_awaited_once_with(test_value) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_number_operation_relays( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy number relay entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.NUMBER]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.NUMBER}." 
+ + for counter, (contact_id, dry_contact) in enumerate( + mock_envoy.data.dry_contact_settings.items() + ): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}_cutoff_battery_level" + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.dry_contact_settings[contact_id].soc_low == float( + entity_state.state + ) + test_value = 10.0 + counter + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.update_dry_contact.assert_awaited_once_with( + {"id": contact_id, "soc_low": test_value} + ) + mock_envoy.update_dry_contact.reset_mock() + + test_entity = f"{entity_base}{name}_restore_battery_level" + assert (entity_state := hass.states.get(test_entity)) + assert mock_envoy.data.dry_contact_settings[contact_id].soc_high == float( + entity_state.state + ) + test_value = 80.0 - counter + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: test_entity, + ATTR_VALUE: test_value, + }, + blocking=True, + ) + + mock_envoy.update_dry_contact.assert_awaited_once_with( + {"id": contact_id, "soc_high": test_value} + ) + mock_envoy.update_dry_contact.reset_mock() diff --git a/tests/components/enphase_envoy/test_select.py b/tests/components/enphase_envoy/test_select.py new file mode 100644 index 00000000000..38640f53dea --- /dev/null +++ b/tests/components/enphase_envoy/test_select.py @@ -0,0 +1,221 @@ +"""Test Enphase Envoy select.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.enphase_envoy.select import ( + ACTION_OPTIONS, + MODE_OPTIONS, + RELAY_ACTION_MAP, + RELAY_MODE_MAP, + REVERSE_RELAY_ACTION_MAP, + REVERSE_RELAY_MODE_MAP, + REVERSE_STORAGE_MODE_MAP, + STORAGE_MODE_MAP, + STORAGE_MODE_OPTIONS, +) +from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_select( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test select platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_select( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test select platform entities are not created.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_relay_actions( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform dry contact relay actions.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SELECT}." 
+ + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + for target in ( + ("generator_action", dry_contact.generator_action, "generator_action"), + ("microgrid_action", dry_contact.micro_grid_action, "micro_grid_action"), + ("grid_action", dry_contact.grid_action, "grid_action"), + ): + test_entity = f"{entity_base}{name}_{target[0]}" + assert (entity_state := hass.states.get(test_entity)) + assert RELAY_ACTION_MAP[target[1]] == (current_state := entity_state.state) + # set all relay actions except the current one + for action in [action for action in ACTION_OPTIONS if action != current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: action, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[action]} + ) + mock_envoy.update_dry_contact.reset_mock() + # and finally back to original + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, target[2]: REVERSE_RELAY_ACTION_MAP[current_state]} + ) + mock_envoy.update_dry_contact.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_relay_modes( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform dry contact relay mode changes.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SELECT}." 
+ + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}_mode" + assert (entity_state := hass.states.get(test_entity)) + assert RELAY_MODE_MAP[dry_contact.mode] == (current_state := entity_state.state) + # set each mode except the current one + for mode in [mode for mode in MODE_OPTIONS if mode != current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: mode, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[mode]} + ) + mock_envoy.update_dry_contact.reset_mock() + + # and finally current mode again + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.update_dry_contact.assert_called_once_with( + {"id": contact_id, "mode": REVERSE_RELAY_MODE_MAP[current_state]} + ) + mock_envoy.update_dry_contact.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_select_storage_modes( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test select platform entities storage mode changes.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SELECT]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.SELECT}.enpower_{sn}_storage_mode" + + assert (entity_state := hass.states.get(test_entity)) + assert STORAGE_MODE_MAP[mock_envoy.data.tariff.storage_settings.mode] == ( + current_state := entity_state.state + ) + + # set each storage mode except the current one + for mode in [mode for mode in STORAGE_MODE_OPTIONS if mode != current_state]: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: mode, + }, + blocking=True, + ) + mock_envoy.set_storage_mode.assert_called_once_with( + REVERSE_STORAGE_MODE_MAP[mode] + ) + mock_envoy.set_storage_mode.reset_mock() + + # and finally with original mode + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: test_entity, + ATTR_OPTION: current_state, + }, + blocking=True, + ) + mock_envoy.set_storage_mode.assert_called_once_with( + REVERSE_STORAGE_MODE_MAP[current_state] + ) diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 13727e29eac..273f81173ff 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -1,58 +1,914 @@ """Test Enphase Envoy sensors.""" -from unittest.mock import patch +from itertools import chain +from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory +from pyenphase.const import PHASENAMES import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.enphase_envoy import DOMAIN from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL +from homeassistant.const import STATE_UNKNOWN, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util +from homeassistant.util.unit_conversion 
import TemperatureConverter -from tests.common import MockConfigEntry - - -@pytest.fixture(name="setup_enphase_envoy_sensor") -async def setup_enphase_envoy_sensor_fixture(hass, config, mock_envoy): - """Define a fixture to set up Enphase Envoy with sensor platform only.""" - with ( - patch( - "homeassistant.components.enphase_envoy.config_flow.Envoy", - return_value=mock_envoy, - ), - patch( - "homeassistant.components.enphase_envoy.Envoy", - return_value=mock_envoy, - ), - patch( - "homeassistant.components.enphase_envoy.PLATFORMS", - [Platform.SENSOR], - ), - ): - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - yield +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, snapshot: SnapshotAssertion, - setup_enphase_envoy_sensor, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, ) -> None: - """Test enphase_envoy sensor entities.""" - # compare registered entities against snapshot of prior run - entity_entries = er.async_entries_for_config_entry( - entity_registry, config_entry.entry_id - ) - assert entity_entries - assert entity_entries == snapshot + """Test sensor platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) - # Test if all entities still have same state - for entity_entry in entity_entries: - assert hass.states.get(entity_entry.entity_id) == snapshot( - name=f"{entity_entry.entity_id}-state" + +PRODUCTION_NAMES: tuple[str, ...] = ( + "current_power_production", + "energy_production_today", + "energy_production_last_seven_days", + "lifetime_energy_production", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_production + PRODUCTION_TARGETS: tuple[float, ...] 
= ( + data.watts_now / 1000.0, + data.watt_hours_today / 1000.0, + data.watt_hours_last_7_days / 1000.0, + data.watt_hours_lifetime / 1000000.0, + ) + + for name, target in list(zip(PRODUCTION_NAMES, PRODUCTION_TARGETS, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +PRODUCTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in PRODUCTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + PRODUCTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_today / 1000.0, + phase_data.watt_hours_last_7_days / 1000.0, + phase_data.watt_hours_lifetime / 1000000.0, + ) + for phase_data in mock_envoy.data.system_production_phases.values() + ] + ) + + for name, target in list( + zip(PRODUCTION_PHASE_NAMES, PRODUCTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CONSUMPTION_NAMES: tuple[str, ...] = ( + "current_power_consumption", + "energy_consumption_today", + "energy_consumption_last_seven_days", + "lifetime_energy_consumption", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_consumption + CONSUMPTION_TARGETS = ( + data.watts_now / 1000.0, + data.watt_hours_today / 1000.0, + data.watt_hours_last_7_days / 1000.0, + data.watt_hours_lifetime / 1000000.0, + ) + + for name, target in list(zip(CONSUMPTION_NAMES, CONSUMPTION_TARGETS, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CONSUMPTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in CONSUMPTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = 
f"{Platform.SENSOR}.envoy_{sn}" + + CONSUMPTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_today / 1000.0, + phase_data.watt_hours_last_7_days / 1000.0, + phase_data.watt_hours_lifetime / 1000000.0, + ) + for phase_data in mock_envoy.data.system_consumption_phases.values() + ] + ) + + for name, target in list( + zip(CONSUMPTION_PHASE_NAMES, CONSUMPTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + +CT_PRODUCTION_NAMES_INT = ("meter_status_flags_active_production_ct",) +CT_PRODUCTION_NAMES_STR = ("metering_status_production_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_ct_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test production CT phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_production + + CT_PRODUCTION_TARGETS_INT = (len(data.status_flags),) + for name, target in list( + zip(CT_PRODUCTION_NAMES_INT, CT_PRODUCTION_TARGETS_INT, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_PRODUCTION_NAMES_STR, CT_PRODUCTION_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_PRODUCTION_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_PRODUCTION_NAMES_INT +] + +CT_PRODUCTION_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_PRODUCTION_NAMES_STR +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_production_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test production ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_PRODUCTION_NAMES_FLOAT_TARGET = [ + len(phase_data.status_flags) + for phase_data in mock_envoy.data.ctmeter_production_phases.values() + ] + + for name, target in list( + zip( + CT_PRODUCTION_NAMES_FLOAT_PHASE, + CT_PRODUCTION_NAMES_FLOAT_TARGET, + strict=False, ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_NAMES_STR_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_production_phases.values() + ] + + for name, target in list( + zip( + CT_PRODUCTION_NAMES_STR_PHASE, + CT_PRODUCTION_NAMES_STR_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + 
+CT_CONSUMPTION_NAMES_FLOAT: tuple[str, ...] = ( + "lifetime_net_energy_consumption", + "lifetime_net_energy_production", + "current_net_power_consumption", + "frequency_net_consumption_ct", + "voltage_net_consumption_ct", + "meter_status_flags_active_net_consumption_ct", +) + +CT_CONSUMPTION_NAMES_STR: tuple[str, ...] = ("metering_status_net_consumption_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_ct_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption CT phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_consumption + + CT_CONSUMPTION_TARGETS_FLOAT = ( + data.energy_delivered / 1000000.0, + data.energy_received / 1000000.0, + data.active_power / 1000.0, + data.frequency, + data.voltage, + len(data.status_flags), + ) + for name, target in list( + zip(CT_CONSUMPTION_NAMES_FLOAT, CT_CONSUMPTION_TARGETS_FLOAT, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_CONSUMPTION_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_CONSUMPTION_NAMES_STR, CT_CONSUMPTION_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_CONSUMPTION_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_CONSUMPTION_NAMES_FLOAT +] + +CT_CONSUMPTION_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in CT_CONSUMPTION_NAMES_STR +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_consumption_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET = chain( + *[ + ( + phase_data.energy_delivered / 1000000.0, + phase_data.energy_received / 1000000.0, + phase_data.active_power / 1000.0, + phase_data.frequency, + phase_data.voltage, + len(phase_data.status_flags), + ) + for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() + ] + ) + + for name, target in list( + zip( + CT_CONSUMPTION_NAMES_FLOAT_PHASE, + CT_CONSUMPTION_NAMES_FLOAT_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_CONSUMPTION_NAMES_STR_PHASE_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_consumption_phases.values() + ] + + for name, target in list( + zip( + CT_CONSUMPTION_NAMES_STR_PHASE, + CT_CONSUMPTION_NAMES_STR_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := 
hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_STORAGE_NAMES_FLOAT = ( + "lifetime_battery_energy_discharged", + "lifetime_battery_energy_charged", + "current_battery_discharge", + "voltage_storage_ct", + "meter_status_flags_active_storage_ct", +) +CT_STORAGE_NAMES_STR = ("metering_status_storage_ct",) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_storage_ct_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test storage phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.ctmeter_storage + + CT_STORAGE_TARGETS_FLOAT = ( + data.energy_delivered / 1000000.0, + data.energy_received / 1000000.0, + data.active_power / 1000.0, + data.voltage, + len(data.status_flags), + ) + for name, target in list( + zip(CT_STORAGE_NAMES_FLOAT, CT_STORAGE_TARGETS_FLOAT, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_STORAGE_TARGETS_STR = (data.metering_status,) + for name, target in list( + zip(CT_STORAGE_NAMES_STR, CT_STORAGE_TARGETS_STR, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +CT_STORAGE_NAMES_FLOAT_PHASE = [ + f"{name}_{phase.lower()}" + for phase in PHASENAMES + for name in (CT_STORAGE_NAMES_FLOAT) +] + +CT_STORAGE_NAMES_STR_PHASE = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in (CT_STORAGE_NAMES_STR) +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_storage_ct_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test storage ct phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + CT_STORAGE_NAMES_FLOAT_PHASE_TARGET = chain( + *[ + ( + phase_data.energy_delivered / 1000000.0, + phase_data.energy_received / 1000000.0, + phase_data.active_power / 1000.0, + phase_data.voltage, + len(phase_data.status_flags), + ) + for phase_data in mock_envoy.data.ctmeter_storage_phases.values() + ] + ) + + for name, target in list( + zip( + CT_STORAGE_NAMES_FLOAT_PHASE, + CT_STORAGE_NAMES_FLOAT_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_STORAGE_NAMES_STR_PHASE_TARGET = [ + phase_data.metering_status + for phase_data in mock_envoy.data.ctmeter_storage_phases.values() + ] + + for name, target in list( + zip( + CT_STORAGE_NAMES_STR_PHASE, + CT_STORAGE_NAMES_STR_PHASE_TARGET, + strict=False, + ) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert entity_state.state == target + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) 
+async def test_sensor_all_phase_entities_disabled_by_integration( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all phase entities are disabled by integration.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + assert all( + f"{ENTITY_BASE}_{entity}" + in (integration_disabled_entities(entity_registry, config_entry)) + for entity in ( + PRODUCTION_PHASE_NAMES + + CONSUMPTION_PHASE_NAMES + + CT_PRODUCTION_NAMES_FLOAT_PHASE + + CT_PRODUCTION_NAMES_STR_PHASE + + CT_CONSUMPTION_NAMES_FLOAT_PHASE + + CT_CONSUMPTION_NAMES_STR_PHASE + ) + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_storage_phase_disabled_by_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_envoy: AsyncMock, +) -> None: + """Test all storage CT phase entities are disabled by integration.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + assert all( + f"{ENTITY_BASE}_{entity}" + in integration_disabled_entities(entity_registry, config_entry) + for entity in (CT_STORAGE_NAMES_FLOAT_PHASE + CT_STORAGE_NAMES_STR_PHASE) + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_inverter_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy inverter entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SENSOR}.inverter" + + for sn, inverter in mock_envoy.data.inverters.items(): + assert (entity_state := hass.states.get(f"{entity_base}_{sn}")) + assert float(entity_state.state) == (inverter.last_report_watts) + assert (last_reported := hass.states.get(f"{entity_base}_{sn}_last_reported")) + assert dt_util.parse_datetime( + last_reported.state + ) == dt_util.utc_from_timestamp(inverter.last_report_date) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_sensor_inverter_disabled_by_integration( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test enphase_envoy inverter disabled by integration entities.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + INVERTER_BASE = f"{Platform.SENSOR}.inverter" + + assert all( + f"{INVERTER_BASE}_{sn}_last_reported" + in integration_disabled_entities(entity_registry, config_entry) + for sn in mock_envoy.data.inverters + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) 
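+# aggregate battery sensors use the Envoy serial number in their entity ids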
+async def test_sensor_encharge_aggregate_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy encharge aggregate entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.encharge_aggregate + + for target in ( + ("battery", data.state_of_charge), + ("reserve_battery_level", data.reserve_state_of_charge), + ("available_battery_energy", data.available_energy), + ("reserve_battery_energy", data.backup_reserve), + ("battery_capacity", data.max_available_capacity), + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{target[0]}")) + assert float(entity_state.state) == target[1] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_encharge_enpower_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy encharge enpower entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + ENTITY_BASE = f"{Platform.SENSOR}.enpower" + + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) + assert ( + round( + TemperatureConverter.convert( + float(entity_state.state), + hass.config.units.temperature_unit, + UnitOfTemperature.FAHRENHEIT + if mock_envoy.data.enpower.temperature_unit == "F" + else UnitOfTemperature.CELSIUS, + ) + ) + == mock_envoy.data.enpower.temperature + ) + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) + assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( + mock_envoy.data.enpower.last_report_date + ) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +async def test_sensor_encharge_power_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, +) -> None: + """Test enphase_envoy encharge_power entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + ENTITY_BASE = f"{Platform.SENSOR}.encharge" + + ENCHARGE_POWER_NAMES = ( + "battery", + "apparent_power", + "power", + ) + + ENCHARGE_POWER_TARGETS = [ + ( + sn, + ( + encharge_power.soc, + encharge_power.apparent_power_mva / 1000.0, + encharge_power.real_power_mw / 1000.0, + ), + ) + for sn, encharge_power in mock_envoy.data.encharge_power.items() + ] + + for sn, sn_target in ENCHARGE_POWER_TARGETS: + for name, target in list(zip(ENCHARGE_POWER_NAMES, sn_target, strict=False)): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_{name}")) + assert float(entity_state.state) == target + + for sn, encharge_inventory in mock_envoy.data.encharge_inventory.items(): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_temperature")) + assert ( + round( + TemperatureConverter.convert( + float(entity_state.state), + hass.config.units.temperature_unit, + UnitOfTemperature.FAHRENHEIT + if encharge_inventory.temperature_unit == "F" + else UnitOfTemperature.CELSIUS, + ) + ) + == encharge_inventory.temperature + ) + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{sn}_last_reported")) + 
assert dt_util.parse_datetime(entity_state.state) == dt_util.utc_from_timestamp( + encharge_inventory.last_report_date + ) + + +def integration_disabled_entities( + entity_registry: er.EntityRegistry, config_entry: MockConfigEntry +) -> list[str]: + """Return list of entity ids marked as disabled by integration.""" + return [ + entity_entry.entity_id + for entity_entry in er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + if entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION + ] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_missing_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_envoy: AsyncMock, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test enphase_envoy sensor platform missing data handling.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + ENTITY_BASE = f"{Platform.SENSOR}.envoy_{mock_envoy.serial_number}" + + # force missing data to test the 'is None' code paths + mock_envoy.data.system_production_phases["L2"] = None + mock_envoy.data.system_consumption_phases["L2"] = None + mock_envoy.data.ctmeter_production = None + mock_envoy.data.ctmeter_consumption = None + mock_envoy.data.ctmeter_storage = None + mock_envoy.data.ctmeter_production_phases = None + mock_envoy.data.ctmeter_consumption_phases = None + mock_envoy.data.ctmeter_storage_phases = None + + # use different inverter serial to test 'expected inverter missing' code + mock_envoy.data.inverters["2"] = mock_envoy.data.inverters.pop("1") + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed"} + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + # all these should now be in unknown state + for entity in ( + "lifetime_energy_production_l2", + "lifetime_energy_consumption_l2", + "metering_status_production_ct", + "metering_status_net_consumption_ct", + "metering_status_storage_ct", + "metering_status_production_ct_l2", + "metering_status_net_consumption_ct_l2", + "metering_status_storage_ct_l2", + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{entity}")) + assert entity_state.state == STATE_UNKNOWN + + # test the original inverter is now unknown + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNKNOWN diff --git a/tests/components/enphase_envoy/test_switch.py b/tests/components/enphase_envoy/test_switch.py new file mode 100644 index 00000000000..15f59cc3ea6 --- /dev/null +++ b/tests/components/enphase_envoy/test_switch.py @@ -0,0 +1,213 @@ +"""Test Enphase Envoy switch platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TOGGLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_switch( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch platform entities against snapshot.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy", + "envoy_1p_metered", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +async def test_no_switch( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test switch platform entities are not created.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + assert not er.async_entries_for_config_entry(entity_registry, config_entry.entry_id) + + +@pytest.mark.parametrize( + ("mock_envoy"), ["envoy_metered_batt_relay"], indirect=["mock_envoy"] +) +async def test_switch_grid_operation( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test switch platform operation for grid switches.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.data.enpower.serial_number + test_entity = f"{Platform.SWITCH}.enpower_{sn}_grid_enabled" + + # validate envoy value is reflected in entity + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == STATE_ON + + # test grid status switch operation + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_off_grid.assert_awaited_once_with() + mock_envoy.go_off_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_on_grid.assert_awaited_once_with() + mock_envoy.go_on_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.go_off_grid.assert_awaited_once_with() + mock_envoy.go_off_grid.reset_mock() + + test_entity = f"{Platform.SWITCH}.enpower_{sn}_charge_from_grid" + + # validate envoy value is reflected in entity + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == STATE_ON + + # test grid status switch operation + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.disable_charge_from_grid.assert_awaited_once_with() + mock_envoy.disable_charge_from_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + mock_envoy.enable_charge_from_grid.assert_awaited_once_with() + mock_envoy.enable_charge_from_grid.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + 
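+    # switch is currently on, so toggling it should call disable_charge_from_grid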
mock_envoy.disable_charge_from_grid.assert_awaited_once_with() + mock_envoy.disable_charge_from_grid.reset_mock() + + +@pytest.mark.parametrize( + ("mock_envoy", "entity_states"), + [ + ( + "envoy_metered_batt_relay", + { + "NC1": (STATE_OFF, 0, 1), + "NC2": (STATE_ON, 1, 0), + "NC3": (STATE_OFF, 0, 1), + }, + ) + ], + indirect=["mock_envoy"], +) +async def test_switch_relay_operation( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + entity_states: dict[str, tuple[str, int, int]], +) -> None: + """Test enphase_envoy switch relay entities operation.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, config_entry) + + entity_base = f"{Platform.SWITCH}." + + for contact_id, dry_contact in mock_envoy.data.dry_contact_settings.items(): + name = dry_contact.load_name.lower().replace(" ", "_") + test_entity = f"{entity_base}{name}" + assert (entity_state := hass.states.get(test_entity)) + assert entity_state.state == entity_states[contact_id][0] + open_count = entity_states[contact_id][1] + close_count = entity_states[contact_id][2] + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + mock_envoy.open_dry_contact.assert_awaited_once_with(contact_id) + mock_envoy.close_dry_contact.assert_not_awaited() + mock_envoy.open_dry_contact.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + mock_envoy.close_dry_contact.assert_awaited_once_with(contact_id) + mock_envoy.open_dry_contact.assert_not_awaited() + mock_envoy.close_dry_contact.reset_mock() + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: test_entity}, + blocking=True, + ) + + assert mock_envoy.open_dry_contact.await_count == open_count + assert mock_envoy.close_dry_contact.await_count == close_count + mock_envoy.open_dry_contact.reset_mock() + mock_envoy.close_dry_contact.reset_mock() diff --git a/tests/components/environment_canada/__init__.py b/tests/components/environment_canada/__init__.py index 65b0ed16207..92c28e09b74 100644 --- a/tests/components/environment_canada/__init__.py +++ b/tests/components/environment_canada/__init__.py @@ -1 +1,67 @@ """Tests for the Environment Canada integration.""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock, MagicMock, patch + +from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN +from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +FIXTURE_USER_INPUT = { + CONF_LATITUDE: 55.55, + CONF_LONGITUDE: 42.42, + CONF_STATION: "XX/1234567", + CONF_LANGUAGE: "Gibberish", +} + + +async def init_integration(hass: HomeAssistant, ec_data) -> MockConfigEntry: + """Set up the Environment Canada integration in Home Assistant.""" + + def mock_ec(): + ec_mock = MagicMock() + ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] + ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] + ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] + ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] + ec_mock.update = AsyncMock() + return ec_mock + + config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT, title="Home") + config_entry.add_to_hass(hass) + + weather_mock = mock_ec() + ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) 
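+    # attach the (possibly modified) fixture data to the mocked ECWeather instance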
+ weather_mock.conditions = ec_data["conditions"] + weather_mock.alerts = ec_data["alerts"] + weather_mock.daily_forecasts = ec_data["daily_forecasts"] + weather_mock.metadata = ec_data["metadata"] + + radar_mock = mock_ec() + radar_mock.image = b"GIF..." + radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) + + with ( + patch( + "homeassistant.components.environment_canada.ECWeather", + return_value=weather_mock, + ), + patch( + "homeassistant.components.environment_canada.ECAirQuality", + return_value=mock_ec(), + ), + patch( + "homeassistant.components.environment_canada.ECRadar", + return_value=radar_mock, + ), + patch( + "homeassistant.components.environment_canada.config_flow.ECWeather", + return_value=weather_mock, + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/environment_canada/fixtures/current_conditions_data.json b/tests/components/environment_canada/fixtures/current_conditions_data.json index f3a18869940..ceb00028f95 100644 --- a/tests/components/environment_canada/fixtures/current_conditions_data.json +++ b/tests/components/environment_canada/fixtures/current_conditions_data.json @@ -135,7 +135,8 @@ "icon_code": "30", "temperature": -1, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-03 15:00:00+00:00" }, { "period": "Tuesday", @@ -143,7 +144,8 @@ "icon_code": "00", "temperature": 18, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-04 15:00:00+00:00" }, { "period": "Tuesday night", @@ -151,7 +153,8 @@ "icon_code": "30", "temperature": 3, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-04 15:00:00+00:00" }, { "period": "Wednesday", @@ -159,7 +162,8 @@ "icon_code": "00", "temperature": 20, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-05 15:00:00+00:00" }, { "period": "Wednesday night", @@ -167,7 +171,8 @@ "icon_code": "30", "temperature": 9, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-05 15:00:00+00:00" }, { "period": "Thursday", @@ -175,7 +180,8 @@ "icon_code": "02", "temperature": 20, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-06 15:00:00+00:00" }, { "period": "Thursday night", @@ -183,7 +189,8 @@ "icon_code": "12", "temperature": 7, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-06 15:00:00+00:00" }, { "period": "Friday", @@ -191,7 +198,8 @@ "icon_code": "12", "temperature": 13, "temperature_class": "high", - "precip_probability": 40 + "precip_probability": 40, + "timestamp": "2022-10-07 15:00:00+00:00" }, { "period": "Friday night", @@ -199,7 +207,8 @@ "icon_code": "32", "temperature": 1, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-07 15:00:00+00:00" }, { "period": "Saturday", @@ -207,7 +216,8 @@ "icon_code": "02", "temperature": 10, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-08 15:00:00+00:00" }, { "period": "Saturday night", @@ -215,7 +225,8 @@ "icon_code": "32", "temperature": 3, "temperature_class": "low", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-08 15:00:00+00:00" }, { "period": 
"Sunday", @@ -223,7 +234,8 @@ "icon_code": "02", "temperature": 12, "temperature_class": "high", - "precip_probability": 0 + "precip_probability": 0, + "timestamp": "2022-10-09 15:00:00+00:00" } ], "metadata": { diff --git a/tests/components/environment_canada/snapshots/test_weather.ambr b/tests/components/environment_canada/snapshots/test_weather.ambr new file mode 100644 index 00000000000..7ba37110c2a --- /dev/null +++ b/tests/components/environment_canada/snapshots/test_weather.ambr @@ -0,0 +1,94 @@ +# serializer version: 1 +# name: test_forecast_daily + dict({ + 'weather.home_forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-04 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 18.0, + 'templow': 3.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-05 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 9.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-06 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2022-10-07 15:00:00+00:00', + 'precipitation_probability': 40, + 'temperature': 13.0, + 'templow': 1.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-08 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 10.0, + 'templow': 3.0, + }), + ]), + }), + }) +# --- +# name: test_forecast_daily_with_some_previous_days_data + dict({ + 'weather.home_forecast': dict({ + 'forecast': list([ + dict({ + 'condition': 'clear-night', + 'datetime': '2022-10-03 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': None, + 'templow': -1.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-04 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 18.0, + 'templow': 3.0, + }), + dict({ + 'condition': 'sunny', + 'datetime': '2022-10-05 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 9.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-06 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 20.0, + 'templow': 7.0, + }), + dict({ + 'condition': 'rainy', + 'datetime': '2022-10-07 15:00:00+00:00', + 'precipitation_probability': 40, + 'temperature': 13.0, + 'templow': 1.0, + }), + dict({ + 'condition': 'partlycloudy', + 'datetime': '2022-10-08 15:00:00+00:00', + 'precipitation_probability': 0, + 'temperature': 10.0, + 'templow': 3.0, + }), + ]), + }), + }) +# --- diff --git a/tests/components/environment_canada/test_diagnostics.py b/tests/components/environment_canada/test_diagnostics.py index 8f800111d39..7e9c8691f90 100644 --- a/tests/components/environment_canada/test_diagnostics.py +++ b/tests/components/environment_canada/test_diagnostics.py @@ -1,16 +1,16 @@ """Test Environment Canada diagnostics.""" -from datetime import UTC, datetime import json -from unittest.mock import AsyncMock, MagicMock, patch from syrupy import SnapshotAssertion -from homeassistant.components.environment_canada.const import CONF_STATION, DOMAIN +from homeassistant.components.environment_canada.const import CONF_STATION from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry, load_fixture +from . 
import init_integration + +from tests.common import load_fixture from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -22,60 +22,6 @@ FIXTURE_USER_INPUT = { } -async def init_integration(hass: HomeAssistant) -> MockConfigEntry: - """Set up the Environment Canada integration in Home Assistant.""" - - def mock_ec(): - ec_mock = MagicMock() - ec_mock.station_id = FIXTURE_USER_INPUT[CONF_STATION] - ec_mock.lat = FIXTURE_USER_INPUT[CONF_LATITUDE] - ec_mock.lon = FIXTURE_USER_INPUT[CONF_LONGITUDE] - ec_mock.language = FIXTURE_USER_INPUT[CONF_LANGUAGE] - ec_mock.update = AsyncMock() - return ec_mock - - config_entry = MockConfigEntry(domain=DOMAIN, data=FIXTURE_USER_INPUT) - config_entry.add_to_hass(hass) - - ec_data = json.loads( - load_fixture("environment_canada/current_conditions_data.json") - ) - - weather_mock = mock_ec() - ec_data["metadata"]["timestamp"] = datetime(2022, 10, 4, tzinfo=UTC) - weather_mock.conditions = ec_data["conditions"] - weather_mock.alerts = ec_data["alerts"] - weather_mock.daily_forecasts = ec_data["daily_forecasts"] - weather_mock.metadata = ec_data["metadata"] - - radar_mock = mock_ec() - radar_mock.image = b"GIF..." - radar_mock.timestamp = datetime(2022, 10, 4, tzinfo=UTC) - - with ( - patch( - "homeassistant.components.environment_canada.ECWeather", - return_value=weather_mock, - ), - patch( - "homeassistant.components.environment_canada.ECAirQuality", - return_value=mock_ec(), - ), - patch( - "homeassistant.components.environment_canada.ECRadar", - return_value=radar_mock, - ), - patch( - "homeassistant.components.environment_canada.config_flow.ECWeather", - return_value=weather_mock, - ), - ): - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - return config_entry - - async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, @@ -83,7 +29,11 @@ async def test_entry_diagnostics( ) -> None: """Test config entry diagnostics.""" - config_entry = await init_integration(hass) + ec_data = json.loads( + load_fixture("environment_canada/current_conditions_data.json") + ) + + config_entry = await init_integration(hass, ec_data) diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) diff --git a/tests/components/environment_canada/test_weather.py b/tests/components/environment_canada/test_weather.py new file mode 100644 index 00000000000..e8c21e2dc06 --- /dev/null +++ b/tests/components/environment_canada/test_weather.py @@ -0,0 +1,68 @@ +"""Test weather.""" + +import json + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.weather import ( + DOMAIN as WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, +) +from homeassistant.core import HomeAssistant + +from . 
import init_integration + +from tests.common import load_fixture + + +async def test_forecast_daily( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test basic forecast.""" + + ec_data = json.loads( + load_fixture("environment_canada/current_conditions_data.json") + ) + + # First entry in test data is a half day; we don't want that for this test + del ec_data["daily_forecasts"][0] + + await init_integration(hass, ec_data) + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + { + "entity_id": "weather.home_forecast", + "type": "daily", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_forecast_daily_with_some_previous_days_data( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test forecast with half day at start.""" + + ec_data = json.loads( + load_fixture("environment_canada/current_conditions_data.json") + ) + + await init_integration(hass, ec_data) + + response = await hass.services.async_call( + WEATHER_DOMAIN, + SERVICE_GET_FORECASTS, + { + "entity_id": "weather.home_forecast", + "type": "daily", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index ac1558b8aa0..75be231558f 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from asyncio import Event -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch @@ -175,7 +175,7 @@ def mock_client(mock_device_info) -> APIClient: @pytest.fixture -async def mock_dashboard(hass): +async def mock_dashboard(hass: HomeAssistant) -> AsyncGenerator[dict[str, Any]]: """Mock dashboard.""" data = {"configured": [], "importable": []} with patch( diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 9a2b1f1a80e..68af6665380 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -2,6 +2,7 @@ from ipaddress import ip_address import json +from typing import Any from unittest.mock import AsyncMock, patch from aioesphomeapi import ( @@ -329,7 +330,7 @@ async def test_user_invalid_password(hass: HomeAssistant, mock_client) -> None: async def test_user_dashboard_has_wrong_key( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step with key from dashboard that is incorrect.""" @@ -376,7 +377,7 @@ async def test_user_dashboard_has_wrong_key( async def test_user_discovers_name_and_gets_key_from_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name and get the key from the dashboard.""" @@ -429,7 +430,7 @@ async def test_user_discovers_name_and_gets_key_from_dashboard_fails( hass: HomeAssistant, dashboard_exception: Exception, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name and get the key from the dashboard.""" @@ -484,7 +485,7 @@ async def test_user_discovers_name_and_gets_key_from_dashboard_fails( async def 
test_user_discovers_name_and_dashboard_is_unavailable( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test user step can discover the name but the dashboard is unavailable.""" @@ -843,7 +844,7 @@ async def test_reauth_confirm_valid( async def test_reauth_fixed_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard.""" @@ -894,7 +895,7 @@ async def test_reauth_fixed_via_dashboard( async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_config_entry, mock_setup_entry: None, ) -> None: @@ -938,7 +939,7 @@ async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, mock_config_entry, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically by seeing password removed.""" @@ -962,7 +963,7 @@ async def test_reauth_fixed_via_remove_password( async def test_reauth_fixed_via_dashboard_at_confirm( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard at confirm step.""" @@ -1153,7 +1154,9 @@ async def test_discovery_dhcp_no_changes( assert entry.data[CONF_HOST] == "192.168.43.183" -async def test_discovery_hassio(hass: HomeAssistant, mock_dashboard) -> None: +async def test_discovery_hassio( + hass: HomeAssistant, mock_dashboard: dict[str, Any] +) -> None: """Test dashboard discovery.""" result = await hass.config_entries.flow.async_init( "esphome", @@ -1181,7 +1184,7 @@ async def test_discovery_hassio(hass: HomeAssistant, mock_dashboard) -> None: async def test_zeroconf_encryption_key_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key retrieved from dashboard.""" @@ -1247,7 +1250,7 @@ async def test_zeroconf_encryption_key_via_dashboard( async def test_zeroconf_encryption_key_via_dashboard_with_api_encryption_prop( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key retrieved from dashboard with api_encryption property set.""" @@ -1313,7 +1316,7 @@ async def test_zeroconf_encryption_key_via_dashboard_with_api_encryption_prop( async def test_zeroconf_no_encryption_key_via_dashboard( hass: HomeAssistant, mock_client, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test encryption key not retrieved from dashboard.""" diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index 1b0303a8a48..da805eb2eee 100644 --- a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -16,7 +16,10 @@ from tests.common import MockConfigEntry async def test_dashboard_storage( - hass: HomeAssistant, init_integration, mock_dashboard, hass_storage: dict[str, Any] + hass: HomeAssistant, + init_integration, + mock_dashboard: dict[str, Any], + hass_storage: dict[str, Any], ) -> None: """Test dashboard storage.""" assert hass_storage[dashboard.STORAGE_KEY]["data"] == { @@ -197,7 +200,9 @@ async def test_new_dashboard_fix_reauth( assert mock_config_entry.data[CONF_NOISE_PSK] == VALID_NOISE_PSK 
-async def test_dashboard_supports_update(hass: HomeAssistant, mock_dashboard) -> None: +async def test_dashboard_supports_update( + hass: HomeAssistant, mock_dashboard: dict[str, Any] +) -> None: """Test dashboard supports update.""" dash = dashboard.async_get_dashboard(hass) diff --git a/tests/components/esphome/test_diagnostics.py b/tests/components/esphome/test_diagnostics.py index 4fb8f993aca..b66b6d72fce 100644 --- a/tests/components/esphome/test_diagnostics.py +++ b/tests/components/esphome/test_diagnostics.py @@ -1,9 +1,11 @@ """Tests for the diagnostics data provided by the ESPHome integration.""" +from typing import Any from unittest.mock import ANY import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components import bluetooth from homeassistant.core import HomeAssistant @@ -20,13 +22,13 @@ async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, init_integration: MockConfigEntry, - mock_dashboard, + mock_dashboard: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for config entry.""" result = await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) async def test_diagnostics_with_bluetooth( @@ -60,6 +62,7 @@ async def test_diagnostics_with_bluetooth( }, }, "config": { + "created_at": ANY, "data": { "device_name": "test", "host": "test.local", @@ -70,6 +73,7 @@ async def test_diagnostics_with_bluetooth( "domain": "esphome", "entry_id": ANY, "minor_version": 1, + "modified_at": ANY, "options": {"allow_service_calls": False}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 01f267581f4..9d2a906466e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1024,7 +1024,7 @@ async def test_esphome_device_with_project( ) assert dev.manufacturer == "mfr" assert dev.model == "model" - assert dev.hw_version == "2.2.2" + assert dev.sw_version == "2.2.2 (ESPHome 1.0.0)" async def test_esphome_device_with_manufacturer( @@ -1229,7 +1229,7 @@ async def test_manager_voice_assistant_handlers_api( "", 0, None, None ) - assert "Voice assistant UDP server was not stopped" in caplog.text + assert "Previous Voice assistant pipeline was not stopped" in caplog.text await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) diff --git a/tests/components/esphome/test_sensor.py b/tests/components/esphome/test_sensor.py index bebfaaa69d4..76f71b53167 100644 --- a/tests/components/esphome/test_sensor.py +++ b/tests/components/esphome/test_sensor.py @@ -28,10 +28,10 @@ from homeassistant.const import ( ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, + EntityCategory, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity import EntityCategory from .conftest import MockESPHomeDevice diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index cca1dd1851f..83e89b1de00 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -1,12 +1,14 @@ """Test ESPHome update entities.""" from collections.abc import Awaitable, Callable +from typing import Any from unittest.mock import Mock, patch from aioesphomeapi import ( APIClient, EntityInfo, EntityState, 
+ UpdateCommand, UpdateInfo, UpdateState, UserService, @@ -14,6 +16,10 @@ from aioesphomeapi import ( import pytest from homeassistant.components.esphome.dashboard import async_get_dashboard +from homeassistant.components.homeassistant import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, +) from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, @@ -89,7 +95,7 @@ async def test_update_entity( stub_reconnect, mock_config_entry, mock_device_info, - mock_dashboard, + mock_dashboard: dict[str, Any], devices_payload, expected_state, expected_attributes, @@ -195,7 +201,7 @@ async def test_update_static_info( [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity.""" mock_dashboard["configured"] = [ @@ -241,7 +247,7 @@ async def test_update_device_state_for_availability( expected_disconnect: bool, expected_state: str, has_deep_sleep: bool, - mock_dashboard, + mock_dashboard: dict[str, Any], mock_client: APIClient, mock_esphome_device: Callable[ [APIClient, list[EntityInfo], list[UserService], list[EntityState]], @@ -277,7 +283,7 @@ async def test_update_entity_dashboard_not_available_startup( stub_reconnect, mock_config_entry, mock_device_info, - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is not available at startup.""" with ( @@ -326,7 +332,7 @@ async def test_update_entity_dashboard_discovered_after_startup_but_update_faile [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when dashboard is discovered after startup and the first update fails.""" with patch( @@ -391,7 +397,7 @@ async def test_update_becomes_available_at_runtime( [APIClient, list[EntityInfo], list[UserService], list[EntityState]], Awaitable[MockESPHomeDevice], ], - mock_dashboard, + mock_dashboard: dict[str, Any], ) -> None: """Test ESPHome update entity when the dashboard has no device at startup but gets them later.""" await mock_esphome_device( @@ -526,3 +532,12 @@ async def test_generic_device_update_entity_has_update( assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 + + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "update.test_myupdate"}, + blocking=True, + ) + + mock_client.update_command.assert_called_with(key=1, command=UpdateCommand.CHECK) diff --git a/tests/components/event/test_init.py b/tests/components/event/test_init.py index 981a7744beb..c6828c2c290 100644 --- a/tests/components/event/test_init.py +++ b/tests/components/event/test_init.py @@ -1,10 +1,10 @@ """The tests for the event integration.""" +from collections.abc import Generator from typing import Any from freezegun import freeze_time import pytest -from typing_extensions import Generator from homeassistant.components.event import ( ATTR_EVENT_TYPE, diff --git a/tests/components/evil_genius_labs/conftest.py b/tests/components/evil_genius_labs/conftest.py index 3941917e130..fc0725607e2 100644 --- a/tests/components/evil_genius_labs/conftest.py +++ b/tests/components/evil_genius_labs/conftest.py @@ -1,36 +1,44 @@ """Test helpers for Evil Genius Labs.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import patch 
import pytest +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) @pytest.fixture(scope="package") -def all_fixture(): +def all_fixture() -> dict[str, Any]: """Fixture data.""" - data = json.loads(load_fixture("data.json", "evil_genius_labs")) + data = load_json_array_fixture("data.json", "evil_genius_labs") return {item["name"]: item for item in data} @pytest.fixture(scope="package") -def info_fixture(): +def info_fixture() -> JsonObjectType: """Fixture info.""" - return json.loads(load_fixture("info.json", "evil_genius_labs")) + return load_json_object_fixture("info.json", "evil_genius_labs") @pytest.fixture(scope="package") -def product_fixture(): +def product_fixture() -> dict[str, str]: """Fixture info.""" return {"productName": "Fibonacci256"} @pytest.fixture -def config_entry(hass): +def config_entry(hass: HomeAssistant) -> MockConfigEntry: """Evil genius labs config entry.""" entry = MockConfigEntry(domain="evil_genius_labs", data={"host": "192.168.1.113"}) entry.add_to_hass(hass) @@ -39,8 +47,13 @@ def config_entry(hass): @pytest.fixture async def setup_evil_genius_labs( - hass, config_entry, all_fixture, info_fixture, product_fixture, platforms -): + hass: HomeAssistant, + config_entry: MockConfigEntry, + all_fixture: dict[str, Any], + info_fixture: JsonObjectType, + product_fixture: dict[str, str], + platforms: list[Platform], +) -> AsyncGenerator[None]: """Test up Evil Genius Labs instance.""" with ( patch( diff --git a/tests/components/ezviz/__init__.py b/tests/components/ezviz/__init__.py index 9fc297be099..78bbee0b0ad 100644 --- a/tests/components/ezviz/__init__.py +++ b/tests/components/ezviz/__init__.py @@ -1,6 +1,6 @@ """Tests for the EZVIZ integration.""" -from unittest.mock import patch +from unittest.mock import _patch, patch from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -83,10 +83,11 @@ API_LOGIN_RETURN_VALIDATE = { } -def _patch_async_setup_entry(return_value=True): +def patch_async_setup_entry() -> _patch: + """Patch async_setup_entry.""" return patch( "homeassistant.components.ezviz.async_setup_entry", - return_value=return_value, + return_value=True, ) diff --git a/tests/components/ezviz/conftest.py b/tests/components/ezviz/conftest.py index 10fd0406a1c..171cfffc2fc 100644 --- a/tests/components/ezviz/conftest.py +++ b/tests/components/ezviz/conftest.py @@ -1,11 +1,14 @@ """Define pytest.fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from pyezviz import EzvizClient from pyezviz.test_cam_rtsp import TestRTSPAuth import pytest +from homeassistant.core import HomeAssistant + ezviz_login_token_return = { "session_id": "fake_token", "rf_session_id": "fake_rf_token", @@ -14,13 +17,13 @@ ezviz_login_token_return = { @pytest.fixture(autouse=True) -def mock_ffmpeg(hass): +def mock_ffmpeg(hass: HomeAssistant) -> None: """Mock ffmpeg is loaded.""" hass.config.components.add("ffmpeg") @pytest.fixture -def ezviz_test_rtsp_config_flow(hass): +def ezviz_test_rtsp_config_flow() -> Generator[MagicMock]: """Mock the EzvizApi for easier testing.""" with ( patch.object(TestRTSPAuth, "main", return_value=True), @@ -40,7 +43,7 @@ def ezviz_test_rtsp_config_flow(hass): @pytest.fixture -def 
ezviz_config_flow(hass): +def ezviz_config_flow() -> Generator[MagicMock]: """Mock the EzvizAPI for easier config flow testing.""" with ( patch.object(EzvizClient, "login", return_value=True), diff --git a/tests/components/ezviz/test_config_flow.py b/tests/components/ezviz/test_config_flow.py index 57c3ae0600e..f9459635f2c 100644 --- a/tests/components/ezviz/test_config_flow.py +++ b/tests/components/ezviz/test_config_flow.py @@ -1,6 +1,6 @@ """Test the EZVIZ config flow.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch from pyezviz.exceptions import ( AuthTestResultFailed, @@ -10,6 +10,7 @@ from pyezviz.exceptions import ( InvalidURL, PyEzvizError, ) +import pytest from homeassistant.components.ezviz.const import ( ATTR_SERIAL, @@ -40,12 +41,13 @@ from . import ( API_LOGIN_RETURN_VALIDATE, DISCOVERY_INFO, USER_INPUT_VALIDATE, - _patch_async_setup_entry, init_integration, + patch_async_setup_entry, ) -async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def test_user_form(hass: HomeAssistant) -> None: """Test the user initiated form.""" result = await hass.config_entries.flow.async_init( @@ -55,7 +57,7 @@ async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -75,7 +77,8 @@ async def test_user_form(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["reason"] == "already_configured_account" -async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def test_user_custom_url(hass: HomeAssistant) -> None: """Test custom url step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -94,7 +97,7 @@ async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: assert result["step_id"] == "user_custom_url" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_URL: "test-user"}, @@ -107,7 +110,8 @@ async def test_user_custom_url(hass: HomeAssistant, ezviz_config_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_async_step_reauth(hass: HomeAssistant, ezviz_config_flow) -> None: +@pytest.mark.usefixtures("ezviz_config_flow") +async def test_async_step_reauth(hass: HomeAssistant) -> None: """Test the reauth step.""" result = await hass.config_entries.flow.async_init( @@ -117,7 +121,7 @@ async def test_async_step_reauth(hass: HomeAssistant, ezviz_config_flow) -> None assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, @@ -185,9 +189,8 @@ async def test_step_reauth_abort_if_cloud_account_missing(hass: HomeAssistant) - assert result["reason"] == "ezviz_cloud_account_missing" -async def test_async_step_integration_discovery( - hass: HomeAssistant, ezviz_config_flow, ezviz_test_rtsp_config_flow -) -> None: +@pytest.mark.usefixtures("ezviz_config_flow", 
"ezviz_test_rtsp_config_flow") +async def test_async_step_integration_discovery(hass: HomeAssistant) -> None: """Test discovery and confirm step.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): await init_integration(hass) @@ -199,7 +202,7 @@ async def test_async_step_integration_discovery( assert result["step_id"] == "confirm" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -221,7 +224,7 @@ async def test_async_step_integration_discovery( async def test_options_flow(hass: HomeAssistant) -> None: """Test updating options.""" - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: entry = await init_integration(hass) assert entry.options[CONF_FFMPEG_ARGUMENTS] == DEFAULT_FFMPEG_ARGUMENTS @@ -245,7 +248,9 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_user_form_exception(hass: HomeAssistant, ezviz_config_flow) -> None: +async def test_user_form_exception( + hass: HomeAssistant, ezviz_config_flow: MagicMock +) -> None: """Test we handle exception on user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -311,7 +316,7 @@ async def test_user_form_exception(hass: HomeAssistant, ezviz_config_flow) -> No async def test_discover_exception_step1( hass: HomeAssistant, - ezviz_config_flow, + ezviz_config_flow: MagicMock, ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -397,10 +402,9 @@ async def test_discover_exception_step1( assert result["reason"] == "unknown" +@pytest.mark.usefixtures("ezviz_config_flow") async def test_discover_exception_step3( - hass: HomeAssistant, - ezviz_config_flow, - ezviz_test_rtsp_config_flow, + hass: HomeAssistant, ezviz_test_rtsp_config_flow: MagicMock ) -> None: """Test we handle unexpected exception on discovery.""" with patch("homeassistant.components.ezviz.PLATFORMS_BY_TYPE", []): @@ -459,7 +463,7 @@ async def test_discover_exception_step3( async def test_user_custom_url_exception( - hass: HomeAssistant, ezviz_config_flow + hass: HomeAssistant, ezviz_config_flow: MagicMock ) -> None: """Test we handle unexpected exception.""" ezviz_config_flow.side_effect = PyEzvizError() @@ -534,7 +538,7 @@ async def test_user_custom_url_exception( async def test_async_step_reauth_exception( - hass: HomeAssistant, ezviz_config_flow + hass: HomeAssistant, ezviz_config_flow: MagicMock ) -> None: """Test the reauth step exceptions.""" @@ -545,7 +549,7 @@ async def test_async_step_reauth_exception( assert result["step_id"] == "user" assert result["errors"] == {} - with _patch_async_setup_entry() as mock_setup_entry: + with patch_async_setup_entry() as mock_setup_entry: result = await hass.config_entries.flow.async_configure( result["flow_id"], USER_INPUT_VALIDATE, diff --git a/tests/components/fan/conftest.py b/tests/components/fan/conftest.py new file mode 100644 index 00000000000..2e3644793df --- /dev/null +++ b/tests/components/fan/conftest.py @@ -0,0 +1,23 @@ +"""Fixtures for Fan platform tests.""" + +from collections.abc import Generator + +import pytest + +from homeassistant.config_entries import ConfigFlow +from homeassistant.core import HomeAssistant + +from tests.common import mock_config_flow, mock_platform + + +class 
MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, "test.config_flow") + + with mock_config_flow("test", MockFlow): + yield diff --git a/tests/components/fan/test_device_condition.py b/tests/components/fan/test_device_condition.py index 9f9bde1a680..da48f3223af 100644 --- a/tests/components/fan/test_device_condition.py +++ b/tests/components/fan/test_device_condition.py @@ -12,11 +12,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -24,12 +20,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 +104,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -184,22 +174,22 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off - event - test_event2" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -246,5 +236,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/fan/test_device_trigger.py b/tests/components/fan/test_device_trigger.py index 38f39376592..f4673636637 100644 --- a/tests/components/fan/test_device_trigger.py +++ b/tests/components/fan/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config 
folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -180,7 +173,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -273,8 +266,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_on - device - {entry.entity_id} - off - on - None", f"turn_on_or_off - device - {entry.entity_id} - off - on - None", } @@ -282,8 +275,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_off - device - {entry.entity_id} - on - off - None", f"turn_on_or_off - device - {entry.entity_id} - on - off - None", } @@ -293,7 +286,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -342,9 +335,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_on - device - {entry.entity_id} - off - on - None" ) @@ -353,7 +346,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -399,16 +392,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index 04f594b959c..a72ad5e48f6 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -1,5 +1,7 @@ """Tests for fan platforms.""" +from unittest.mock import patch + import pytest from homeassistant.components import fan @@ -12,15 +14,23 @@ from homeassistant.components.fan import ( FanEntityFeature, NotValidPresetModeError, ) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.helpers.entity_registry as er from homeassistant.setup import async_setup_component from .common import MockFan from tests.common import ( + MockConfigEntry, + MockModule, + MockPlatform, help_test_all, import_and_test_deprecated_constant_enum, + mock_integration, + mock_platform, setup_test_component_platform, ) @@ -167,7 +177,10 @@ def test_deprecated_constants( enum: fan.FanEntityFeature, ) -> None: """Test deprecated constants.""" - import_and_test_deprecated_constant_enum(caplog, fan, enum, "SUPPORT_", "2025.1") + if not FanEntityFeature.TURN_OFF and not FanEntityFeature.TURN_ON: + import_and_test_deprecated_constant_enum( + caplog, fan, enum, "SUPPORT_", "2025.1" + ) def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> None: @@ -180,11 +193,288 @@ def test_deprecated_supported_features_ints(caplog: pytest.LogCaptureFixture) -> return 1 entity = MockFan() - assert entity.supported_features_compat is FanEntityFeature(1) + assert entity.supported_features is FanEntityFeature(1) assert "MockFan" in caplog.text assert "is using deprecated supported features values" in caplog.text assert "Instead it should use" in caplog.text assert "FanEntityFeature.SET_SPEED" in caplog.text caplog.clear() - assert entity.supported_features_compat is FanEntityFeature(1) + assert entity.supported_features is FanEntityFeature(1) assert "is using deprecated supported features values" not in caplog.text + + +async def test_warning_not_implemented_turn_on_off_feature( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + 
"""Test adding feature flag and warn if missing when methods are set.""" + + called = [] + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + def turn_on( + self, + percentage: int | None = None, + preset_mode: str | None = None, + ) -> None: + """Turn on.""" + called.append("turn_on") + + def turn_off(self) -> None: + """Turn off.""" + called.append("turn_off") + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert ( + "Entity fan.test (.MockFanEntityTest'>) " + "does not set FanEntityFeature.TURN_OFF but implements the turn_off method. Please report it to the author of the 'test' custom integration" + in caplog.text + ) + assert ( + "Entity fan.test (.MockFanEntityTest'>) " + "does not set FanEntityFeature.TURN_ON but implements the turn_on method. 
Please report it to the author of the 'test' custom integration" + in caplog.text + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + { + "entity_id": "fan.test", + }, + blocking=True, + ) + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_OFF, + { + "entity_id": "fan.test", + }, + blocking=True, + ) + + assert len(called) == 2 + assert "turn_on" in called + assert "turn_off" in called + + +async def test_no_warning_implemented_turn_on_off_feature( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when feature flags are set.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text + + +async def test_no_warning_integration_has_migrated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _enable_turn_on_off_backwards_compatibility = False + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = 
MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text + + +async def test_no_warning_integration_implement_feature_flags( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration uses the correct feature flags.""" + + class MockFanEntityTest(MockFan): + """Mock Fan device.""" + + _attr_supported_features = ( + FanEntityFeature.DIRECTION + | FanEntityFeature.OSCILLATE + | FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_fan_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test fan platform via config entry.""" + async_add_entities([MockFanEntityTest(name="test", entity_id="fan.test")]) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.fan", + MockPlatform(async_setup_entry=async_setup_entry_fan_platform), + ) + + with patch.object( + MockFanEntityTest, "__module__", "tests.custom_components.fan.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("fan.test") + assert state is not None + + assert "does not set FanEntityFeature.TURN_OFF" not in caplog.text + assert "does not set FanEntityFeature.TURN_ON" not in caplog.text diff --git a/tests/components/feedreader/conftest.py b/tests/components/feedreader/conftest.py index 0a5342615a9..8eeb89e00cd 100644 --- a/tests/components/feedreader/conftest.py +++ b/tests/components/feedreader/conftest.py @@ -52,6 +52,18 @@ def fixture_feed_identically_timed_events(hass: HomeAssistant) -> bytes: return load_fixture_bytes("feedreader6.xml") +@pytest.fixture(name="feed_without_items") +def fixture_feed_without_items(hass: HomeAssistant) -> bytes: + """Load test feed without any items.""" + return load_fixture_bytes("feedreader7.xml") + + +@pytest.fixture(name="feed_only_summary") +def fixture_feed_only_summary(hass: HomeAssistant) -> bytes: + """Load test feed data with one event containing only a summary, no content.""" + return load_fixture_bytes("feedreader8.xml") + + @pytest.fixture(name="events") async def fixture_events(hass: HomeAssistant) -> list[Event]: """Fixture that catches alexa events.""" diff --git a/tests/components/feedreader/fixtures/feedreader.xml b/tests/components/feedreader/fixtures/feedreader.xml index 8c85a4975ee..17402cad081 100644 --- a/tests/components/feedreader/fixtures/feedreader.xml +++ b/tests/components/feedreader/fixtures/feedreader.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader1.xml 
b/tests/components/feedreader/fixtures/feedreader1.xml index ff856125779..c71507c15b7 100644 --- a/tests/components/feedreader/fixtures/feedreader1.xml +++ b/tests/components/feedreader/fixtures/feedreader1.xml @@ -8,19 +8,21 @@ Mon, 30 Apr 2018 15:00:00 +1000 1800 - - Title 1 - Description 1 - http://www.example.com/link/1 - GUID 1 - Mon, 30 Apr 2018 15:10:00 +1000 - Title 2 Description 2 http://www.example.com/link/2 GUID 2 Mon, 30 Apr 2018 15:11:00 +1000 + Content 2 + + + Title 1 + Description 1 + http://www.example.com/link/1 + GUID 1 + Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader2.xml b/tests/components/feedreader/fixtures/feedreader2.xml index 653a16e4561..2471d70edcb 100644 --- a/tests/components/feedreader/fixtures/feedreader2.xml +++ b/tests/components/feedreader/fixtures/feedreader2.xml @@ -9,88 +9,109 @@ 1800 - Title 1 - Mon, 30 Apr 2018 15:00:00 +1000 - - - Title 2 - Mon, 30 Apr 2018 15:01:00 +1000 - - - Title 3 - Mon, 30 Apr 2018 15:02:00 +1000 - - - Title 4 - Mon, 30 Apr 2018 15:03:00 +1000 - - - Title 5 - Mon, 30 Apr 2018 15:04:00 +1000 - - - Title 6 - Mon, 30 Apr 2018 15:05:00 +1000 - - - Title 7 - Mon, 30 Apr 2018 15:06:00 +1000 - - - Title 8 - Mon, 30 Apr 2018 15:07:00 +1000 - - - Title 9 - Mon, 30 Apr 2018 15:08:00 +1000 - - - Title 10 - Mon, 30 Apr 2018 15:09:00 +1000 - - - Title 11 - Mon, 30 Apr 2018 15:10:00 +1000 - - - Title 12 - Mon, 30 Apr 2018 15:11:00 +1000 - - - Title 13 - Mon, 30 Apr 2018 15:12:00 +1000 - - - Title 14 - Mon, 30 Apr 2018 15:13:00 +1000 - - - Title 15 - Mon, 30 Apr 2018 15:14:00 +1000 - - - Title 16 - Mon, 30 Apr 2018 15:15:00 +1000 - - - Title 17 - Mon, 30 Apr 2018 15:16:00 +1000 - - - Title 18 - Mon, 30 Apr 2018 15:17:00 +1000 - - - Title 19 - Mon, 30 Apr 2018 15:18:00 +1000 + Title 21 + Mon, 30 Apr 2018 15:20:00 +1000 + Content 21 Title 20 Mon, 30 Apr 2018 15:19:00 +1000 + Content 20 - Title 21 - Mon, 30 Apr 2018 15:20:00 +1000 + Title 19 + Mon, 30 Apr 2018 15:18:00 +1000 + Content 19 + + + Title 18 + Mon, 30 Apr 2018 15:17:00 +1000 + Content 18 + + + Title 17 + Mon, 30 Apr 2018 15:16:00 +1000 + Content 17 + + + Title 16 + Mon, 30 Apr 2018 15:15:00 +1000 + Content 16 + + + Title 15 + Mon, 30 Apr 2018 15:14:00 +1000 + Content 15 + + + Title 14 + Mon, 30 Apr 2018 15:13:00 +1000 + Content 14 + + + Title 13 + Mon, 30 Apr 2018 15:12:00 +1000 + Content 13 + + + Title 12 + Mon, 30 Apr 2018 15:11:00 +1000 + Content 12 + + + Title 11 + Mon, 30 Apr 2018 15:10:00 +1000 + Content 11 + + + Title 10 + Mon, 30 Apr 2018 15:09:00 +1000 + Content 10 + + + Title 9 + Mon, 30 Apr 2018 15:08:00 +1000 + Content 9 + + + Title 8 + Mon, 30 Apr 2018 15:07:00 +1000 + Content 8 + + + Title 7 + Mon, 30 Apr 2018 15:06:00 +1000 + Content 7 + + + Title 6 + Mon, 30 Apr 2018 15:05:00 +1000 + Content 6 + + + Title 5 + Mon, 30 Apr 2018 15:04:00 +1000 + Content 5 + + + Title 4 + Mon, 30 Apr 2018 15:03:00 +1000 + Content 4 + + + Title 3 + Mon, 30 Apr 2018 15:02:00 +1000 + Content 3 + + + Title 1 + Mon, 30 Apr 2018 15:00:00 +1000 + Content 1 + + + Title 2 + Mon, 30 Apr 2018 15:01:00 +1000 + Content 2 diff --git a/tests/components/feedreader/fixtures/feedreader3.xml b/tests/components/feedreader/fixtures/feedreader3.xml index d8ccd119306..67daef20fe8 100644 --- a/tests/components/feedreader/fixtures/feedreader3.xml +++ b/tests/components/feedreader/fixtures/feedreader3.xml @@ -14,17 +14,20 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +1000 + Content 1 Title 2 Description 2 http://www.example.com/link/2 
GUID 2 + Content 2 Description 3 http://www.example.com/link/3 GUID 3 + Content 3 diff --git a/tests/components/feedreader/fixtures/feedreader4.xml b/tests/components/feedreader/fixtures/feedreader4.xml index 81828ccb6e2..11c8d501395 100644 --- a/tests/components/feedreader/fixtures/feedreader4.xml +++ b/tests/components/feedreader/fixtures/feedreader4.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 26.10.2019 - 12:06:24 + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader5.xml b/tests/components/feedreader/fixtures/feedreader5.xml index d9b1dda1ad2..562fd45ea93 100644 --- a/tests/components/feedreader/fixtures/feedreader5.xml +++ b/tests/components/feedreader/fixtures/feedreader5.xml @@ -14,5 +14,6 @@ urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a 2003-12-13T18:30:02Z Some text. + Content 1 diff --git a/tests/components/feedreader/fixtures/feedreader6.xml b/tests/components/feedreader/fixtures/feedreader6.xml index 621c89787e8..48abd06b95b 100644 --- a/tests/components/feedreader/fixtures/feedreader6.xml +++ b/tests/components/feedreader/fixtures/feedreader6.xml @@ -14,6 +14,7 @@ http://www.example.com/link/1 GUID 1 Mon, 30 Apr 2018 15:10:00 +0000 + Content 1 Title 2 @@ -21,6 +22,7 @@ http://www.example.com/link/2 GUID 2 Mon, 30 Apr 2018 15:10:00 +0000 + Content 2 diff --git a/tests/components/feedreader/fixtures/feedreader7.xml b/tests/components/feedreader/fixtures/feedreader7.xml new file mode 100644 index 00000000000..0ffac8dd2ee --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader7.xml @@ -0,0 +1,11 @@ + + + + RSS Sample + This is an example of an RSS feed + http://www.example.com/main.html + Mon, 30 Apr 2018 12:00:00 +1000 + Mon, 30 Apr 2018 15:00:00 +1000 + 1800 + + diff --git a/tests/components/feedreader/fixtures/feedreader8.xml b/tests/components/feedreader/fixtures/feedreader8.xml new file mode 100644 index 00000000000..d1c167352f8 --- /dev/null +++ b/tests/components/feedreader/fixtures/feedreader8.xml @@ -0,0 +1,21 @@ + + + + RSS Sample + This is an example of an RSS feed + http://www.example.com/main.html + Mon, 30 Apr 2018 12:00:00 +1000 + Mon, 30 Apr 2018 15:00:00 +1000 + 1800 + + + Title 1 + Description 1 + http://www.example.com/link/1 + GUID 1 + Mon, 30 Apr 2018 15:10:00 +1000 + This is a summary + + + + diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 669ca665f6b..47bccce902f 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components.feedreader.const import ( ) from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_URL -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -128,7 +128,9 @@ async def test_import( assert config_entries[0].data == expected_data assert config_entries[0].options == expected_options - assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader") + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, "deprecated_yaml_feedreader" + ) async def test_import_errors( diff --git a/tests/components/feedreader/test_event.py b/tests/components/feedreader/test_event.py new file mode 100644 index 
00000000000..5d903383c05 --- /dev/null +++ b/tests/components/feedreader/test_event.py @@ -0,0 +1,57 @@ +"""The tests for the feedreader event entity.""" + +from datetime import timedelta +from unittest.mock import patch + +from homeassistant.components.feedreader.event import ( + ATTR_CONTENT, + ATTR_LINK, + ATTR_TITLE, +) +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from . import create_mock_entry +from .const import VALID_CONFIG_DEFAULT + +from tests.common import async_fire_time_changed + + +async def test_event_entity( + hass: HomeAssistant, feed_one_event, feed_two_event, feed_only_summary +) -> None: + """Test feed event entity.""" + entry = create_mock_entry(VALID_CONFIG_DEFAULT) + entry.add_to_hass(hass) + with patch( + "homeassistant.components.feedreader.coordinator.feedparser.http.get", + side_effect=[feed_one_event, feed_two_event, feed_only_summary], + ): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 1" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" + assert state.attributes[ATTR_CONTENT] == "Content 1" + + future = dt_util.utcnow() + timedelta(hours=1, seconds=1) + async_fire_time_changed(hass, future) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 2" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/2" + assert state.attributes[ATTR_CONTENT] == "Content 2" + + future = dt_util.utcnow() + timedelta(hours=2, seconds=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get("event.mock_title") + assert state + assert state.attributes[ATTR_TITLE] == "Title 1" + assert state.attributes[ATTR_LINK] == "http://www.example.com/link/1" + assert state.attributes[ATTR_CONTENT] == "This is a summary" diff --git a/tests/components/feedreader/test_init.py b/tests/components/feedreader/test_init.py index 1dcbf5ba45d..d7700d79e3b 100644 --- a/tests/components/feedreader/test_init.py +++ b/tests/components/feedreader/test_init.py @@ -165,6 +165,21 @@ async def test_feed_identical_timestamps( ) +async def test_feed_with_only_summary( + hass: HomeAssistant, events, feed_only_summary +) -> None: + """Test simple feed with only summary, no content.""" + assert await async_setup_config_entry( + hass, VALID_CONFIG_DEFAULT, return_value=feed_only_summary + ) + await hass.async_block_till_done() + + assert len(events) == 1 + assert events[0].data.title == "Title 1" + assert events[0].data.description == "Description 1" + assert events[0].data.content[0].value == "This is a summary" + + async def test_feed_updates( hass: HomeAssistant, events, feed_one_event, feed_two_event ) -> None: @@ -247,6 +262,20 @@ async def test_feed_with_unrecognized_publication_date( assert len(events) == 1 +async def test_feed_without_items( + hass: HomeAssistant, events, feed_without_items, caplog: pytest.LogCaptureFixture +) -> None: + """Test simple feed without any items.""" + assert "No new entries to be published in feed" not in caplog.text + assert await async_setup_config_entry( + hass, VALID_CONFIG_DEFAULT, return_value=feed_without_items + ) + await hass.async_block_till_done() + + assert "No new entries to be published in feed" in caplog.text + assert len(events) 
== 0 + + async def test_feed_invalid_data(hass: HomeAssistant, events) -> None: """Test feed with invalid data.""" assert await async_setup_config_entry( @@ -296,7 +325,7 @@ async def test_feed_errors( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) assert ( - "Error fetching feed data from http://some.rss.local/rss_feed.xml: " + "Error fetching feed data from http://some.rss.local/rss_feed.xml : " in caplog.text ) diff --git a/tests/components/fibaro/conftest.py b/tests/components/fibaro/conftest.py index d2f004a160c..4d99dea6682 100644 --- a/tests/components/fibaro/conftest.py +++ b/tests/components/fibaro/conftest.py @@ -1,9 +1,9 @@ """Test helpers.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.fibaro import CONF_IMPORT_PLUGINS, DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME diff --git a/tests/components/fido/test_sensor.py b/tests/components/fido/test_sensor.py index a067f060af8..d47c7ce8e9f 100644 --- a/tests/components/fido/test_sensor.py +++ b/tests/components/fido/test_sensor.py @@ -6,9 +6,9 @@ from unittest.mock import MagicMock, patch from pyfido.client import PyFidoError import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components.fido import sensor as fido from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import assert_setup_component diff --git a/tests/components/file/conftest.py b/tests/components/file/conftest.py index 265acde36ca..5345a0d38d0 100644 --- a/tests/components/file/conftest.py +++ b/tests/components/file/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for file platform.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/filesize/conftest.py b/tests/components/filesize/conftest.py index 859886a3058..ac66af0d22f 100644 --- a/tests/components/filesize/conftest.py +++ b/tests/components/filesize/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.filesize.const import DOMAIN from homeassistant.const import CONF_FILE_PATH diff --git a/tests/components/filter/test_sensor.py b/tests/components/filter/test_sensor.py index 0ece61708f2..a9581b78f4e 100644 --- a/tests/components/filter/test_sensor.py +++ b/tests/components/filter/test_sensor.py @@ -467,7 +467,7 @@ def test_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [20, 21] == [f.state for f in filtered] + assert [f.state for f in filtered] == [20, 21] def test_time_throttle(values: list[State]) -> None: @@ -480,7 +480,7 @@ def test_time_throttle(values: list[State]) -> None: new_state = filt.filter_state(state) if not filt.skip_processing: filtered.append(new_state) - assert [20, 18, 22] == [f.state for f in filtered] + assert [f.state for f in filtered] == [20, 18, 22] def test_time_sma(values: list[State]) -> None: diff --git a/tests/components/fitbit/conftest.py b/tests/components/fitbit/conftest.py index b1ff8a94e12..57511739993 100644 --- 
a/tests/components/fitbit/conftest.py +++ b/tests/components/fitbit/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for fitbit.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator import datetime from http import HTTPStatus import time @@ -9,7 +9,6 @@ from unittest.mock import patch import pytest from requests_mock.mocker import Mocker -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/fjaraskupan/test_config_flow.py b/tests/components/fjaraskupan/test_config_flow.py index fa0df9241dd..6d3df614443 100644 --- a/tests/components/fjaraskupan/test_config_flow.py +++ b/tests/components/fjaraskupan/test_config_flow.py @@ -2,7 +2,8 @@ from __future__ import annotations -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import AsyncMock, patch import pytest @@ -15,7 +16,7 @@ from . import COOKER_SERVICE_INFO @pytest.fixture(name="mock_setup_entry", autouse=True) -async def fixture_mock_setup_entry(hass): +def fixture_mock_setup_entry() -> Generator[AsyncMock]: """Fixture for config entry.""" with patch( @@ -24,7 +25,7 @@ async def fixture_mock_setup_entry(hass): yield mock_setup_entry -async def test_configure(hass: HomeAssistant, mock_setup_entry) -> None: +async def test_configure(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" with patch( "homeassistant.components.fjaraskupan.config_flow.async_discovered_service_info", diff --git a/tests/components/flexit_bacnet/conftest.py b/tests/components/flexit_bacnet/conftest.py index e1b98070d25..cc7c9fa0570 100644 --- a/tests/components/flexit_bacnet/conftest.py +++ b/tests/components/flexit_bacnet/conftest.py @@ -1,10 +1,10 @@ """Configuration for Flexit Nordic (BACnet) tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from flexit_bacnet import FlexitBACnet import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.flexit_bacnet.const import DOMAIN diff --git a/tests/components/flo/conftest.py b/tests/components/flo/conftest.py index 33d467a2abf..66b56d1f10b 100644 --- a/tests/components/flo/conftest.py +++ b/tests/components/flo/conftest.py @@ -16,7 +16,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def config_entry(hass): +def config_entry() -> MockConfigEntry: """Config entry version 1 fixture.""" return MockConfigEntry( domain=FLO_DOMAIN, diff --git a/tests/components/flo/test_binary_sensor.py b/tests/components/flo/test_binary_sensor.py index d3032cde1b5..23a84734b0d 100644 --- a/tests/components/flo/test_binary_sensor.py +++ b/tests/components/flo/test_binary_sensor.py @@ -1,5 +1,7 @@ """Test Flo by Moen binary sensor entities.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import ( ATTR_FRIENDLY_NAME, @@ -13,9 +15,12 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry + +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_binary_sensors( - hass: HomeAssistant, config_entry, aioclient_mock_fixture + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test Flo by Moen sensors.""" config_entry.add_to_hass(hass) diff --git a/tests/components/flo/test_config_flow.py 
b/tests/components/flo/test_config_flow.py index 99f8f315fb2..f9237e979a6 100644 --- a/tests/components/flo/test_config_flow.py +++ b/tests/components/flo/test_config_flow.py @@ -5,6 +5,8 @@ import json import time from unittest.mock import patch +import pytest + from homeassistant import config_entries from homeassistant.components.flo.const import DOMAIN from homeassistant.const import CONTENT_TYPE_JSON @@ -16,7 +18,8 @@ from .common import TEST_EMAIL_ADDRESS, TEST_PASSWORD, TEST_TOKEN, TEST_USER_ID from tests.test_util.aiohttp import AiohttpClientMocker -async def test_form(hass: HomeAssistant, aioclient_mock_fixture) -> None: +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/flo/test_device.py b/tests/components/flo/test_device.py index 6248bdcd8f9..c3e26e77370 100644 --- a/tests/components/flo/test_device.py +++ b/tests/components/flo/test_device.py @@ -5,6 +5,7 @@ from unittest.mock import patch from aioflo.errors import RequestError from freezegun.api import FrozenDateTimeFactory +import pytest from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.flo.coordinator import FloDeviceDataUpdateCoordinator @@ -14,14 +15,14 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: @@ -90,10 +91,10 @@ async def test_device( assert aioclient_mock.call_count == call_count + 6 +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_device_failures( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, freezer: FrozenDateTimeFactory, ) -> None: diff --git a/tests/components/flo/test_init.py b/tests/components/flo/test_init.py index 599a91b80fb..805a6278395 100644 --- a/tests/components/flo/test_init.py +++ b/tests/components/flo/test_init.py @@ -1,5 +1,7 @@ """Test init.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant @@ -7,10 +9,11 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry -async def test_setup_entry( - hass: HomeAssistant, config_entry, aioclient_mock_fixture -) -> None: + +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test migration of config entry from v1.""" config_entry.add_to_hass(hass) assert await async_setup_component( diff --git a/tests/components/flo/test_sensor.py b/tests/components/flo/test_sensor.py index 5fe388c62e1..0c763927296 100644 --- a/tests/components/flo/test_sensor.py +++ b/tests/components/flo/test_sensor.py @@ -1,5 +1,7 @@ """Test Flo by Moen sensor entities.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from 
homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass from homeassistant.const import ATTR_ENTITY_ID, CONF_PASSWORD, CONF_USERNAME @@ -9,12 +11,12 @@ from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker -async def test_sensors( - hass: HomeAssistant, config_entry, aioclient_mock_fixture -) -> None: +@pytest.mark.usefixtures("aioclient_mock_fixture") +async def test_sensors(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test Flo by Moen sensors.""" hass.config.units = US_CUSTOMARY_SYSTEM config_entry.add_to_hass(hass) @@ -85,10 +87,10 @@ async def test_sensors( ) +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_manual_update_entity( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test manual update entity via service homeasasistant/update_entity.""" diff --git a/tests/components/flo/test_services.py b/tests/components/flo/test_services.py index d8837d9c6b6..565f39f69fe 100644 --- a/tests/components/flo/test_services.py +++ b/tests/components/flo/test_services.py @@ -19,15 +19,16 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker SWITCH_ENTITY_ID = "switch.smart_water_shutoff_shutoff_valve" +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_services( hass: HomeAssistant, - config_entry, - aioclient_mock_fixture, + config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, ) -> None: """Test Flo services.""" diff --git a/tests/components/flo/test_switch.py b/tests/components/flo/test_switch.py index 85f7ea0f317..02ab93f9e67 100644 --- a/tests/components/flo/test_switch.py +++ b/tests/components/flo/test_switch.py @@ -1,5 +1,7 @@ """Tests for the switch domain for Flo by Moen.""" +import pytest + from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN from homeassistant.components.switch import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON @@ -8,9 +10,12 @@ from homeassistant.setup import async_setup_component from .common import TEST_PASSWORD, TEST_USER_ID +from tests.common import MockConfigEntry + +@pytest.mark.usefixtures("aioclient_mock_fixture") async def test_valve_switches( - hass: HomeAssistant, config_entry, aioclient_mock_fixture + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test Flo by Moen valve switches.""" config_entry.add_to_hass(hass) diff --git a/tests/components/flume/conftest.py b/tests/components/flume/conftest.py new file mode 100644 index 00000000000..fb0d0157bbc --- /dev/null +++ b/tests/components/flume/conftest.py @@ -0,0 +1,167 @@ +"""Flume test fixtures.""" + +from collections.abc import Generator +import datetime +from http import HTTPStatus +import json +from unittest.mock import mock_open, patch + +import jwt +import pytest +import requests +from requests_mock.mocker import Mocker + +from homeassistant.components.flume.const import DOMAIN +from homeassistant.const import ( + CONF_CLIENT_ID, + CONF_CLIENT_SECRET, + CONF_PASSWORD, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +USER_ID = "test-user-id" +REFRESH_TOKEN = "refresh-token" +TOKEN_URL = 
"https://api.flumetech.com/oauth/token" +DEVICE_LIST_URL = ( + "https://api.flumetech.com/users/test-user-id/devices?user=true&location=true" +) +BRIDGE_DEVICE = { + "id": "1234", + "type": 1, # Bridge + "location": { + "name": "Bridge Location", + }, + "name": "Flume Bridge", + "connected": True, +} +SENSOR_DEVICE = { + "id": "1234", + "type": 2, # Sensor + "location": { + "name": "Sensor Location", + }, + "name": "Flume Sensor", + "connected": True, +} +DEVICE_LIST = [BRIDGE_DEVICE, SENSOR_DEVICE] +NOTIFICATIONS_URL = "https://api.flumetech.com/users/test-user-id/notifications?limit=50&offset=0&sort_direction=ASC" +NOTIFICATION = { + "id": 111111, + "device_id": "6248148189204194987", + "user_id": USER_ID, + "type": 1, + "message": "Low Flow Leak triggered at Home. Water has been running for 2 hours averaging 0.43 gallons every minute.", + "created_datetime": "2020-01-15T16:33:39.000Z", + "title": "Potential Leak Detected!", + "read": True, + "extra": { + "query": { + "request_id": "SYSTEM_TRIGGERED_USAGE_ALERT", + "since_datetime": "2020-01-15 06:33:59", + "until_datetime": "2020-01-15 08:33:59", + "tz": "America/Los_Angeles", + "bucket": "MIN", + "raw": False, + "group_multiplier": 2, + "device_id": ["6248148189204194987"], + } + }, + "event_rule": "Low Flow Leak", +} + +NOTIFICATIONS_LIST = [NOTIFICATION] + + +@pytest.fixture(name="config_entry") +def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: + """Fixture to create a config entry.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title="test-username", + unique_id="test-username", + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) + config_entry.add_to_hass(hass) + return config_entry + + +def encode_access_token() -> str: + """Encode the payload of the access token.""" + expiration_time = datetime.datetime.now() + datetime.timedelta(hours=12) + payload = { + "user_id": USER_ID, + "exp": int(expiration_time.timestamp()), + } + return jwt.encode(payload, key="secret") + + +@pytest.fixture(name="access_token") +def access_token_fixture(requests_mock: Mocker) -> Generator[None]: + """Fixture to setup the access token.""" + token_response = { + "refresh_token": REFRESH_TOKEN, + "access_token": encode_access_token(), + } + requests_mock.register_uri( + "POST", + TOKEN_URL, + status_code=HTTPStatus.OK, + json={"data": [token_response]}, + ) + with patch("builtins.open", mock_open(read_data=json.dumps(token_response))): + yield + + +@pytest.fixture(name="device_list") +def device_list_fixture(requests_mock: Mocker) -> None: + """Fixture to setup the device list API response access token.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={ + "data": DEVICE_LIST, + }, + ) + + +@pytest.fixture(name="device_list_timeout") +def device_list_timeout_fixture(requests_mock: Mocker) -> None: + """Fixture to test a timeout when connecting to the device list url.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + exc=requests.exceptions.ConnectTimeout, + ) + + +@pytest.fixture(name="device_list_unauthorized") +def device_list_unauthorized_fixture(requests_mock: Mocker) -> None: + """Fixture to test an authorized error from the device list url.""" + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.UNAUTHORIZED, + json={}, + ) + + +@pytest.fixture(name="notifications_list") +def notifications_list_fixture(requests_mock: Mocker) -> None: 
+ """Fixture to setup the device list API response access token.""" + requests_mock.register_uri( + "GET", + NOTIFICATIONS_URL, + status_code=HTTPStatus.OK, + json={ + "data": NOTIFICATIONS_LIST, + }, + ) diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 706cee44739..915299223e9 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -1,8 +1,11 @@ """Test the flume config flow.""" -from unittest.mock import MagicMock, patch +from http import HTTPStatus +from unittest.mock import patch +import pytest import requests.exceptions +from requests_mock.mocker import Mocker from homeassistant import config_entries from homeassistant.components.flume.const import DOMAIN @@ -15,15 +18,12 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import DEVICE_LIST, DEVICE_LIST_URL + from tests.common import MockConfigEntry -def _get_mocked_flume_device_list(): - flume_device_list_mock = MagicMock() - type(flume_device_list_mock).device_list = ["mock"] - return flume_device_list_mock - - +@pytest.mark.usefixtures("access_token", "device_list") async def test_form(hass: HomeAssistant) -> None: """Test we get the form and can setup from user input.""" @@ -33,17 +33,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - mock_flume_device_list = _get_mocked_flume_device_list() - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - return_value=mock_flume_device_list, - ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -71,66 +61,57 @@ async def test_form(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("access_token") +async def test_form_invalid_auth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=Exception, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.UNAUTHORIZED, + json={"message": "Failure"}, + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} +@pytest.mark.usefixtures("access_token", "device_list_timeout") async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - 
"homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=requests.exceptions.ConnectionError(), - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_USERNAME: "test-username", - CONF_PASSWORD: "test-password", - CONF_CLIENT_ID: "client_id", - CONF_CLIENT_SECRET: "client_secret", - }, - ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "cannot_connect"} -async def test_reauth(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("access_token") +async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: """Test we can reauth.""" entry = MockConfigEntry( domain=DOMAIN, @@ -151,35 +132,28 @@ async def test_reauth(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" - with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=Exception, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_PASSWORD: "test-password", - }, - ) + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: "test-password", + }, + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"password": "invalid_auth"} + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + exc=requests.exceptions.ConnectTimeout, + ) + with ( patch( - "homeassistant.components.flume.config_flow.FlumeAuth", + "homeassistant.components.flume.config_flow.os.path.exists", return_value=True, ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - side_effect=requests.exceptions.ConnectionError(), - ), + patch("homeassistant.components.flume.config_flow.os.unlink") as mock_unlink, ): result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], @@ -187,21 +161,22 @@ async def test_reauth(hass: HomeAssistant) -> None: CONF_PASSWORD: "test-password", }, ) + # The existing token file was removed + assert len(mock_unlink.mock_calls) == 1 assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "cannot_connect"} - mock_flume_device_list = _get_mocked_flume_device_list() + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={ + "data": DEVICE_LIST, + }, + ) with ( - patch( - "homeassistant.components.flume.config_flow.FlumeAuth", - return_value=True, - ), - patch( - "homeassistant.components.flume.config_flow.FlumeDeviceList", - return_value=mock_flume_device_list, - ), patch( "homeassistant.components.flume.async_setup_entry", return_value=True, @@ -217,3 +192,31 @@ async def test_reauth(hass: HomeAssistant) -> None: assert mock_setup_entry.called assert result4["type"] is FlowResultType.ABORT assert result4["reason"] == "reauth_successful" + + +@pytest.mark.usefixtures("access_token") +async def test_form_no_devices(hass: HomeAssistant, requests_mock: Mocker) -> None: + """Test a device list response that contains no values will raise an error.""" + result = await hass.config_entries.flow.async_init( + 
DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + requests_mock.register_uri( + "GET", + DEVICE_LIST_URL, + status_code=HTTPStatus.OK, + json={"data": []}, + ) + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} diff --git a/tests/components/flume/test_init.py b/tests/components/flume/test_init.py new file mode 100644 index 00000000000..85c01c1051e --- /dev/null +++ b/tests/components/flume/test_init.py @@ -0,0 +1,135 @@ +"""Test the flume init.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest +from requests_mock.mocker import Mocker + +from homeassistant import config_entries +from homeassistant.components.flume.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .conftest import USER_ID + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +def platforms_fixture() -> Generator[None]: + """Return the platforms to be loaded for this test.""" + # Arbitrary platform to ensure notifications are loaded + with patch("homeassistant.components.flume.PLATFORMS", [Platform.BINARY_SENSOR]): + yield + + +@pytest.mark.usefixtures("access_token", "device_list") +async def test_setup_config_entry( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test load and unload of a ConfigEntry.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("access_token", "device_list_timeout") +async def test_device_list_timeout( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for a timeout when listing devices.""" + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.SETUP_RETRY + + +@pytest.mark.usefixtures("access_token", "device_list_unauthorized") +async def test_reauth_when_unauthorized( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for an authentication error when listing devices.""" + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is config_entries.ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + assert flows[0]["step_id"] == "reauth_confirm" + + +@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") +async def test_list_notifications_service( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test the list notifications service.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + response = await hass.services.async_call( + 
DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + notifications = response.get("notifications") + assert notifications + assert len(notifications) == 1 + assert notifications[0].get("user_id") == USER_ID + + +@pytest.mark.usefixtures("access_token", "device_list", "notifications_list") +async def test_list_notifications_service_config_entry_errors( + hass: HomeAssistant, + requests_mock: Mocker, + config_entry: MockConfigEntry, +) -> None: + """Test error handling for notification service with invalid config entries.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is config_entries.ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state is config_entries.ConfigEntryState.NOT_LOADED + + with pytest.raises(ValueError, match="Config entry not loaded"): + await hass.services.async_call( + DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + with pytest.raises(ValueError, match="Invalid config entry: does-not-exist"): + await hass.services.async_call( + DOMAIN, + "list_notifications", + {}, + target={ + "config_entry": "does-not-exist", + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index ab85303584f..f957083dd11 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -29,7 +29,7 @@ from tests.components.light.common import MockLight @pytest.fixture(autouse=True) -async def set_utc(hass): +async def set_utc(hass: HomeAssistant) -> None: """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") @@ -723,10 +723,8 @@ async def test_flux_after_sunrise_before_sunset_stop_next_day( assert call.data[light.ATTR_XY_COLOR] == [0.439, 0.37] -@pytest.mark.parametrize("x", [0, 1]) async def test_flux_after_sunset_before_midnight_stop_next_day( hass: HomeAssistant, - x, mock_light_entities: list[MockLight], ) -> None: """Test the flux switch after sunset and before stop. 
diff --git a/tests/components/flux_led/conftest.py b/tests/components/flux_led/conftest.py index 2a67c7b46f7..d323b321e08 100644 --- a/tests/components/flux_led/conftest.py +++ b/tests/components/flux_led/conftest.py @@ -1,20 +1,13 @@ """Tests for the flux_led integration.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from tests.common import mock_device_registry - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - @pytest.fixture -def mock_single_broadcast_address(): +def mock_single_broadcast_address() -> Generator[None]: """Mock network's async_async_get_ipv4_broadcast_addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", @@ -24,7 +17,7 @@ def mock_single_broadcast_address(): @pytest.fixture -def mock_multiple_broadcast_addresses(): +def mock_multiple_broadcast_addresses() -> Generator[None]: """Mock network's async_async_get_ipv4_broadcast_addresses to return multiple addresses.""" with patch( "homeassistant.components.network.async_get_ipv4_broadcast_addresses", diff --git a/tests/components/folder_watcher/conftest.py b/tests/components/folder_watcher/conftest.py index 6de9c69d574..ed0adea7a7d 100644 --- a/tests/components/folder_watcher/conftest.py +++ b/tests/components/folder_watcher/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.folder_watcher.const import DOMAIN from homeassistant.config_entries import SOURCE_USER diff --git a/tests/components/forecast_solar/conftest.py b/tests/components/forecast_solar/conftest.py index d1eacad8dbe..01c1f6d8d32 100644 --- a/tests/components/forecast_solar/conftest.py +++ b/tests/components/forecast_solar/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Forecast.Solar integration tests.""" +from collections.abc import Generator from datetime import datetime, timedelta from unittest.mock import AsyncMock, MagicMock, patch from forecast_solar import models import pytest -from typing_extensions import Generator from homeassistant.components.forecast_solar.const import ( CONF_AZIMUTH, diff --git a/tests/components/forked_daapd/conftest.py b/tests/components/forked_daapd/conftest.py index b9dd7087aef..e9f315c030c 100644 --- a/tests/components/forked_daapd/conftest.py +++ b/tests/components/forked_daapd/conftest.py @@ -10,7 +10,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="config_entry") -def config_entry_fixture(): +def config_entry_fixture() -> MockConfigEntry: """Create hass config_entry fixture.""" data = { CONF_HOST: "192.168.1.1", diff --git a/tests/components/forked_daapd/test_browse_media.py b/tests/components/forked_daapd/test_browse_media.py index 805bcac3976..cbd278128ae 100644 --- a/tests/components/forked_daapd/test_browse_media.py +++ b/tests/components/forked_daapd/test_browse_media.py @@ -3,8 +3,6 @@ from http import HTTPStatus from unittest.mock import patch -import pytest - from homeassistant.components import media_source, spotify from homeassistant.components.forked_daapd.browse_media import ( MediaContent, @@ -19,13 +17,16 @@ from homeassistant.components.websocket_api import TYPE_RESULT from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component 
+from tests.common import MockConfigEntry from tests.typing import ClientSessionGenerator, WebSocketGenerator TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" async def test_async_browse_media( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media.""" @@ -203,7 +204,9 @@ async def test_async_browse_media( async def test_async_browse_media_not_found( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media not found.""" @@ -261,7 +264,9 @@ async def test_async_browse_media_not_found( async def test_async_browse_spotify( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browsing spotify.""" @@ -313,7 +318,9 @@ async def test_async_browse_spotify( async def test_async_browse_media_source( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, config_entry + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browsing media_source.""" @@ -361,7 +368,9 @@ async def test_async_browse_media_source( async def test_async_browse_image( - hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + config_entry: MockConfigEntry, ) -> None: """Test browse media images.""" @@ -416,8 +425,7 @@ async def test_async_browse_image( async def test_async_browse_image_missing( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry, - caplog: pytest.LogCaptureFixture, + config_entry: MockConfigEntry, ) -> None: """Test browse media images with no image available.""" diff --git a/tests/components/forked_daapd/test_config_flow.py b/tests/components/forked_daapd/test_config_flow.py index 593b527009b..076fffef59b 100644 --- a/tests/components/forked_daapd/test_config_flow.py +++ b/tests/components/forked_daapd/test_config_flow.py @@ -67,7 +67,7 @@ async def test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -async def test_config_flow(hass: HomeAssistant, config_entry) -> None: +async def test_config_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test that the user step works.""" with ( patch( @@ -102,7 +102,9 @@ async def test_config_flow(hass: HomeAssistant, config_entry) -> None: assert result["type"] is FlowResultType.ABORT -async def test_zeroconf_updates_title(hass: HomeAssistant, config_entry) -> None: +async def test_zeroconf_updates_title( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test that zeroconf updates title and aborts with same host.""" MockConfigEntry(domain=DOMAIN, data={CONF_HOST: "different host"}).add_to_hass(hass) config_entry.add_to_hass(hass) @@ -125,7 +127,9 @@ async def test_zeroconf_updates_title(hass: HomeAssistant, config_entry) -> None assert len(hass.config_entries.async_entries(DOMAIN)) == 2 -async def test_config_flow_no_websocket(hass: HomeAssistant, config_entry) -> None: +async def test_config_flow_no_websocket( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test config flow setup without websocket enabled on server.""" with patch( "homeassistant.components.forked_daapd.config_flow.ForkedDaapdAPI.test_connection", 
@@ -224,7 +228,7 @@ async def test_config_flow_zeroconf_valid(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM -async def test_options_flow(hass: HomeAssistant, config_entry) -> None: +async def test_options_flow(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test config flow options.""" with patch( @@ -251,7 +255,9 @@ async def test_options_flow(hass: HomeAssistant, config_entry) -> None: assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_async_setup_entry_not_ready(hass: HomeAssistant, config_entry) -> None: +async def test_async_setup_entry_not_ready( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test that a PlatformNotReady exception is thrown during platform setup.""" with patch( diff --git a/tests/components/forked_daapd/test_media_player.py b/tests/components/forked_daapd/test_media_player.py index dd2e03f435f..6d7d267eb63 100644 --- a/tests/components/forked_daapd/test_media_player.py +++ b/tests/components/forked_daapd/test_media_player.py @@ -1,6 +1,7 @@ """The media player tests for the forked_daapd media player platform.""" -from unittest.mock import patch +from typing import Any +from unittest.mock import Mock, patch import pytest @@ -63,9 +64,9 @@ from homeassistant.const import ( STATE_PAUSED, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse -from tests.common import async_mock_signal +from tests.common import MockConfigEntry, async_mock_signal TEST_MASTER_ENTITY_NAME = "media_player.owntone_server" TEST_ZONE_ENTITY_NAMES = [ @@ -288,7 +289,7 @@ SAMPLE_PLAYLISTS = [{"id": 7, "name": "test_playlist", "uri": "library:playlist: @pytest.fixture(name="get_request_return_values") -async def get_request_return_values_fixture(): +async def get_request_return_values_fixture() -> dict[str, Any]: """Get request return values we can change later.""" return { "config": SAMPLE_CONFIG, @@ -299,7 +300,11 @@ async def get_request_return_values_fixture(): @pytest.fixture(name="mock_api_object") -async def mock_api_object_fixture(hass, config_entry, get_request_return_values): +async def mock_api_object_fixture( + hass: HomeAssistant, + config_entry: MockConfigEntry, + get_request_return_values: dict[str, Any], +) -> Mock: """Create mock api fixture.""" async def get_request_side_effect(update_type): @@ -341,8 +346,9 @@ async def mock_api_object_fixture(hass, config_entry, get_request_return_values) return mock_api.return_value +@pytest.mark.usefixtures("mock_api_object") async def test_unload_config_entry( - hass: HomeAssistant, config_entry, mock_api_object + hass: HomeAssistant, config_entry: MockConfigEntry ) -> None: """Test the player is set unavailable when the config entry is unloaded.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -352,7 +358,8 @@ async def test_unload_config_entry( assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -def test_master_state(hass: HomeAssistant, mock_api_object) -> None: +@pytest.mark.usefixtures("mock_api_object") +def test_master_state(hass: HomeAssistant) -> None: """Test master state attributes.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) assert state.state == STATE_PAUSED @@ -373,7 +380,7 @@ def test_master_state(hass: HomeAssistant, mock_api_object) -> None: async def test_no_update_when_get_request_returns_none( - hass: HomeAssistant, config_entry, mock_api_object + hass: HomeAssistant, config_entry: MockConfigEntry, mock_api_object: Mock ) -> 
None: """Test when get request returns None.""" @@ -399,8 +406,12 @@ async def test_no_update_when_get_request_returns_none( async def _service_call( - hass, entity_name, service, additional_service_data=None, blocking=True -): + hass: HomeAssistant, + entity_name: str, + service: str, + additional_service_data: dict[str, Any] | None = None, + blocking: bool = True, +) -> ServiceResponse: if additional_service_data is None: additional_service_data = {} return await hass.services.async_call( @@ -411,7 +422,7 @@ async def _service_call( ) -async def test_zone(hass: HomeAssistant, mock_api_object) -> None: +async def test_zone(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test zone attributes and methods.""" zone_entity_name = TEST_ZONE_ENTITY_NAMES[0] state = hass.states.get(zone_entity_name) @@ -450,7 +461,7 @@ async def test_zone(hass: HomeAssistant, mock_api_object) -> None: mock_api_object.change_output.assert_any_call(output_id, selected=True) -async def test_last_outputs_master(hass: HomeAssistant, mock_api_object) -> None: +async def test_last_outputs_master(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test restoration of _last_outputs.""" # Test turning on sends API call await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -467,7 +478,9 @@ async def test_last_outputs_master(hass: HomeAssistant, mock_api_object) -> None async def test_bunch_of_stuff_master( - hass: HomeAssistant, get_request_return_values, mock_api_object + hass: HomeAssistant, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, ) -> None: """Run bunch of stuff.""" await _service_call(hass, TEST_MASTER_ENTITY_NAME, SERVICE_TURN_ON) @@ -551,9 +564,8 @@ async def test_bunch_of_stuff_master( mock_api_object.clear_queue.assert_called_once() -async def test_async_play_media_from_paused( - hass: HomeAssistant, mock_api_object -) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_async_play_media_from_paused(hass: HomeAssistant) -> None: """Test async play media from paused.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -571,7 +583,9 @@ async def test_async_play_media_from_paused( async def test_async_play_media_announcement_from_stopped( - hass: HomeAssistant, get_request_return_values, mock_api_object + hass: HomeAssistant, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, ) -> None: """Test async play media announcement (from stopped).""" updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -597,9 +611,8 @@ async def test_async_play_media_announcement_from_stopped( assert state.last_updated > initial_state.last_updated -async def test_async_play_media_unsupported( - hass: HomeAssistant, mock_api_object -) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_async_play_media_unsupported(hass: HomeAssistant) -> None: """Test async play media on unsupported media type.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -616,7 +629,7 @@ async def test_async_play_media_unsupported( async def test_async_play_media_announcement_tts_timeout( - hass: HomeAssistant, mock_api_object + hass: HomeAssistant, mock_api_object: Mock ) -> None: """Test async play media announcement with TTS timeout.""" mock_api_object.add_to_queue.side_effect = None @@ -638,7 +651,7 @@ async def test_async_play_media_announcement_tts_timeout( async def test_use_pipe_control_with_no_api( - hass: HomeAssistant, mock_api_object + hass: HomeAssistant, 
mock_api_object: Mock ) -> None: """Test using pipe control with no api set.""" await _service_call( @@ -651,7 +664,8 @@ async def test_use_pipe_control_with_no_api( assert mock_api_object.start_playback.call_count == 0 -async def test_clear_source(hass: HomeAssistant, mock_api_object) -> None: +@pytest.mark.usefixtures("mock_api_object") +async def test_clear_source(hass: HomeAssistant) -> None: """Test changing source to clear.""" await _service_call( hass, @@ -665,8 +679,11 @@ async def test_clear_source(hass: HomeAssistant, mock_api_object) -> None: @pytest.fixture(name="pipe_control_api_object") async def pipe_control_api_object_fixture( - hass, config_entry, get_request_return_values, mock_api_object -): + hass: HomeAssistant, + config_entry: MockConfigEntry, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, +) -> Mock: """Fixture for mock librespot_java api.""" with patch( "homeassistant.components.forked_daapd.media_player.LibrespotJavaAPI", @@ -697,9 +714,9 @@ async def pipe_control_api_object_fixture( async def test_librespot_java_stuff( hass: HomeAssistant, - get_request_return_values, - mock_api_object, - pipe_control_api_object, + get_request_return_values: dict[str, Any], + mock_api_object: Mock, + pipe_control_api_object: Mock, ) -> None: """Test options update and librespot-java stuff.""" state = hass.states.get(TEST_MASTER_ENTITY_NAME) @@ -734,9 +751,8 @@ async def test_librespot_java_stuff( assert state.attributes[ATTR_MEDIA_ALBUM_NAME] == "some album" -async def test_librespot_java_play_announcement( - hass: HomeAssistant, pipe_control_api_object -) -> None: +@pytest.mark.usefixtures("pipe_control_api_object") +async def test_librespot_java_play_announcement(hass: HomeAssistant) -> None: """Test play announcement with librespot-java pipe.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -755,7 +771,7 @@ async def test_librespot_java_play_announcement( async def test_librespot_java_play_media_pause_timeout( - hass: HomeAssistant, pipe_control_api_object + hass: HomeAssistant, pipe_control_api_object: Mock ) -> None: """Test play media with librespot-java pipe.""" # test media play with pause timeout @@ -778,7 +794,7 @@ async def test_librespot_java_play_media_pause_timeout( assert state.last_updated > initial_state.last_updated -async def test_unsupported_update(hass: HomeAssistant, mock_api_object) -> None: +async def test_unsupported_update(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test unsupported update type.""" last_updated = hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated updater_update = mock_api_object.start_websocket_handler.call_args[0][2] @@ -787,7 +803,9 @@ async def test_unsupported_update(hass: HomeAssistant, mock_api_object) -> None: assert hass.states.get(TEST_MASTER_ENTITY_NAME).last_updated == last_updated -async def test_invalid_websocket_port(hass: HomeAssistant, config_entry) -> None: +async def test_invalid_websocket_port( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test invalid websocket port on async_init.""" with patch( "homeassistant.components.forked_daapd.media_player.ForkedDaapdAPI", @@ -800,7 +818,7 @@ async def test_invalid_websocket_port(hass: HomeAssistant, config_entry) -> None assert hass.states.get(TEST_MASTER_ENTITY_NAME).state == STATE_UNAVAILABLE -async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object) -> None: +async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test websocket 
disconnection.""" assert hass.states.get(TEST_MASTER_ENTITY_NAME).state != STATE_UNAVAILABLE assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state != STATE_UNAVAILABLE @@ -811,7 +829,9 @@ async def test_websocket_disconnect(hass: HomeAssistant, mock_api_object) -> Non assert hass.states.get(TEST_ZONE_ENTITY_NAMES[0]).state == STATE_UNAVAILABLE -async def test_async_play_media_enqueue(hass: HomeAssistant, mock_api_object) -> None: +async def test_async_play_media_enqueue( + hass: HomeAssistant, mock_api_object: Mock +) -> None: """Test async play media with different enqueue options.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -887,7 +907,7 @@ async def test_async_play_media_enqueue(hass: HomeAssistant, mock_api_object) -> ) -async def test_play_owntone_media(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_owntone_media(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with an owntone source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -913,7 +933,7 @@ async def test_play_owntone_media(hass: HomeAssistant, mock_api_object) -> None: ) -async def test_play_spotify_media(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_spotify_media(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) await _service_call( @@ -937,7 +957,7 @@ async def test_play_spotify_media(hass: HomeAssistant, mock_api_object) -> None: ) -async def test_play_media_source(hass: HomeAssistant, mock_api_object) -> None: +async def test_play_media_source(hass: HomeAssistant, mock_api_object: Mock) -> None: """Test async play media with a spotify source.""" initial_state = hass.states.get(TEST_MASTER_ENTITY_NAME) with patch( diff --git a/tests/components/freedompro/conftest.py b/tests/components/freedompro/conftest.py index 91eecc24f27..8e581673b92 100644 --- a/tests/components/freedompro/conftest.py +++ b/tests/components/freedompro/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from copy import deepcopy from typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.freedompro.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 8666491eb7a..79639835003 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -1,6 +1,6 @@ """Tests for Fritz!Tools button platform.""" -import copy +from copy import deepcopy from datetime import timedelta from unittest.mock import patch @@ -11,9 +11,15 @@ from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow -from .const import MOCK_MESH_DATA, MOCK_NEW_DEVICE_NODE, MOCK_USER_DATA +from .const import ( + MOCK_HOST_ATTRIBUTES_DATA, + MOCK_MESH_DATA, + MOCK_NEW_DEVICE_NODE, + MOCK_USER_DATA, +) from tests.common import MockConfigEntry, async_fire_time_changed @@ -120,7 +126,7 @@ async def test_wol_button_new_device( entry = MockConfigEntry(domain=DOMAIN, 
data=MOCK_USER_DATA) entry.add_to_hass(hass) - mesh_data = copy.deepcopy(MOCK_MESH_DATA) + mesh_data = deepcopy(MOCK_MESH_DATA) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED @@ -148,7 +154,7 @@ async def test_wol_button_absent_for_mesh_slave( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - slave_mesh_data = copy.deepcopy(MOCK_MESH_DATA) + slave_mesh_data = deepcopy(MOCK_MESH_DATA) slave_mesh_data["nodes"][0]["mesh_role"] = MeshRoles.SLAVE fh_class_mock.get_mesh_topology.return_value = slave_mesh_data @@ -170,7 +176,7 @@ async def test_wol_button_absent_for_non_lan_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - printer_wifi_data = copy.deepcopy(MOCK_MESH_DATA) + printer_wifi_data = deepcopy(MOCK_MESH_DATA) # initialization logic uses the connection type of the `node_interface_1_uid` pair of the printer # ni-230 is wifi interface of fritzbox printer_node_interface = printer_wifi_data["nodes"][1]["node_interfaces"][0] @@ -184,3 +190,61 @@ async def test_wol_button_absent_for_non_lan_device( button = hass.states.get("button.printer_wake_on_lan") assert button is None + + +async def test_cleanup_button( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, +) -> None: + """Test cleanup of orphan devices.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED + + # check if tracked device is registered properly + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert entities + assert len(entities) == 3 + + # removed tracked device and trigger cleanup + host_attributes = deepcopy(MOCK_HOST_ATTRIBUTES_DATA) + host_attributes.pop(0) + fh_class_mock.get_hosts_attributes.return_value = host_attributes + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.mock_title_cleanup"}, + blocking=True, + ) + + await hass.async_block_till_done(wait_background_tasks=True) + + # check if orphan tracked device is removed + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert not device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert not entities diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 8d1da9d09d5..853c09c534b 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -263,10 +263,10 @@ async def test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> await hass.services.async_call( DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 123}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, True, ) - assert device.set_target_temperature.call_args_list == [call(123)] + assert device.set_target_temperature.call_args_list == [call(23)] async def test_set_temperature_mode_off(hass: HomeAssistant, 
fritz: Mock) -> None: @@ -282,7 +282,7 @@ async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> Non { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) @@ -303,7 +303,7 @@ async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> No { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) diff --git a/tests/components/fronius/test_diagnostics.py b/tests/components/fronius/test_diagnostics.py index 7b1f384e405..ddef5b4a18c 100644 --- a/tests/components/fronius/test_diagnostics.py +++ b/tests/components/fronius/test_diagnostics.py @@ -1,6 +1,7 @@ """Tests for the diagnostics data provided by the Fronius integration.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,11 +22,8 @@ async def test_diagnostics( mock_responses(aioclient_mock) entry = await setup_fronius_integration(hass) - assert ( - await get_diagnostics_for_config_entry( - hass, - hass_client, - entry, - ) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, + hass_client, + entry, + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/frontend/test_init.py b/tests/components/frontend/test_init.py index 83c82abea35..5006adedd77 100644 --- a/tests/components/frontend/test_init.py +++ b/tests/components/frontend/test_init.py @@ -1,6 +1,7 @@ """The tests for Home Assistant frontend.""" from asyncio import AbstractEventLoop +from collections.abc import Generator from http import HTTPStatus from pathlib import Path import re @@ -64,7 +65,7 @@ CONFIG_THEMES = {DOMAIN: {CONF_THEMES: MOCK_THEMES}} @pytest.fixture -async def ignore_frontend_deps(hass): +async def ignore_frontend_deps(hass: HomeAssistant) -> None: """Frontend dependencies.""" frontend = await async_get_integration(hass, "frontend") for dep in frontend.dependencies: @@ -73,7 +74,7 @@ async def ignore_frontend_deps(hass): @pytest.fixture -async def frontend(hass, ignore_frontend_deps): +async def frontend(hass: HomeAssistant, ignore_frontend_deps: None) -> None: """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -83,7 +84,7 @@ async def frontend(hass, ignore_frontend_deps): @pytest.fixture -async def frontend_themes(hass): +async def frontend_themes(hass: HomeAssistant) -> None: """Frontend setup with themes.""" assert await async_setup_component( hass, @@ -104,7 +105,7 @@ def aiohttp_client( @pytest.fixture async def mock_http_client( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, frontend: None ) -> TestClient: """Start the Home Assistant HTTP component.""" return await aiohttp_client(hass.http.app) @@ -112,7 +113,7 @@ async def mock_http_client( @pytest.fixture async def themes_ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend_themes: None ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await hass_ws_client(hass) @@ -120,7 +121,7 @@ async def themes_ws_client( @pytest.fixture async def ws_client( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, frontend: None ) -> MockHAClientWebSocket: """Start the Home Assistant HTTP component.""" return await 
hass_ws_client(hass) @@ -128,7 +129,9 @@ async def ws_client( @pytest.fixture async def mock_http_client_with_extra_js( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, ignore_frontend_deps + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + ignore_frontend_deps: None, ) -> TestClient: """Start the Home Assistant HTTP component.""" assert await async_setup_component( @@ -145,7 +148,7 @@ async def mock_http_client_with_extra_js( @pytest.fixture -def mock_onboarded(): +def mock_onboarded() -> Generator[None]: """Mock that we're onboarded.""" with patch( "homeassistant.components.onboarding.async_is_onboarded", return_value=True @@ -153,7 +156,8 @@ def mock_onboarded(): yield -async def test_frontend_and_static(mock_http_client, mock_onboarded) -> None: +@pytest.mark.usefixtures("mock_onboarded") +async def test_frontend_and_static(mock_http_client: TestClient) -> None: """Test if we can get the frontend.""" resp = await mock_http_client.get("") assert resp.status == 200 @@ -170,26 +174,31 @@ async def test_frontend_and_static(mock_http_client, mock_onboarded) -> None: assert "public" in resp.headers.get("cache-control") -async def test_dont_cache_service_worker(mock_http_client) -> None: +@pytest.mark.parametrize("sw_url", ["/sw-modern.js", "/sw-legacy.js"]) +async def test_dont_cache_service_worker( + mock_http_client: TestClient, sw_url: str +) -> None: """Test that we don't cache the service worker.""" - resp = await mock_http_client.get("/service_worker.js") + resp = await mock_http_client.get(sw_url) assert resp.status == 200 assert "cache-control" not in resp.headers -async def test_404(mock_http_client) -> None: +async def test_404(mock_http_client: TestClient) -> None: """Test for HTTP 404 error.""" resp = await mock_http_client.get("/not-existing") assert resp.status == HTTPStatus.NOT_FOUND -async def test_we_cannot_POST_to_root(mock_http_client) -> None: +async def test_we_cannot_POST_to_root(mock_http_client: TestClient) -> None: """Test that POST is not allow to root.""" resp = await mock_http_client.post("/") assert resp.status == 405 -async def test_themes_api(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_api( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test that /api/themes returns correct data.""" await themes_ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) msg = await themes_ws_client.receive_json() @@ -216,11 +225,11 @@ async def test_themes_api(hass: HomeAssistant, themes_ws_client) -> None: assert msg["result"]["themes"] == {} +@pytest.mark.usefixtures("ignore_frontend_deps") async def test_themes_persist( hass: HomeAssistant, hass_storage: dict[str, Any], hass_ws_client: WebSocketGenerator, - ignore_frontend_deps, ) -> None: """Test that theme settings are restores after restart.""" hass_storage[THEMES_STORAGE_KEY] = { @@ -242,11 +251,11 @@ async def test_themes_persist( assert msg["result"]["default_dark_theme"] == "dark" +@pytest.mark.usefixtures("frontend_themes") async def test_themes_save_storage( hass: HomeAssistant, hass_storage: dict[str, Any], freezer: FrozenDateTimeFactory, - frontend_themes, ) -> None: """Test that theme settings are restores after restart.""" @@ -270,7 +279,9 @@ async def test_themes_save_storage( } -async def test_themes_set_theme(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_set_theme( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test frontend.set_theme service.""" await 
hass.services.async_call( DOMAIN, "set_theme", {"name": "happy"}, blocking=True @@ -303,7 +314,7 @@ async def test_themes_set_theme(hass: HomeAssistant, themes_ws_client) -> None: async def test_themes_set_theme_wrong_name( - hass: HomeAssistant, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.set_theme service called with wrong name.""" @@ -318,7 +329,9 @@ async def test_themes_set_theme_wrong_name( assert msg["result"]["default_theme"] == "default" -async def test_themes_set_dark_theme(hass: HomeAssistant, themes_ws_client) -> None: +async def test_themes_set_dark_theme( + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket +) -> None: """Test frontend.set_theme service called with dark mode.""" await hass.services.async_call( @@ -358,8 +371,9 @@ async def test_themes_set_dark_theme(hass: HomeAssistant, themes_ws_client) -> N assert msg["result"]["default_dark_theme"] == "light_and_dark" +@pytest.mark.usefixtures("frontend") async def test_themes_set_dark_theme_wrong_name( - hass: HomeAssistant, frontend, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.set_theme service called with mode dark and wrong name.""" await hass.services.async_call( @@ -373,8 +387,9 @@ async def test_themes_set_dark_theme_wrong_name( assert msg["result"]["default_dark_theme"] is None +@pytest.mark.usefixtures("frontend") async def test_themes_reload_themes( - hass: HomeAssistant, frontend, themes_ws_client + hass: HomeAssistant, themes_ws_client: MockHAClientWebSocket ) -> None: """Test frontend.reload_themes service.""" @@ -395,7 +410,7 @@ async def test_themes_reload_themes( assert msg["result"]["default_theme"] == "default" -async def test_missing_themes(hass: HomeAssistant, ws_client) -> None: +async def test_missing_themes(ws_client: MockHAClientWebSocket) -> None: """Test that themes API works when themes are not defined.""" await ws_client.send_json({"id": 5, "type": "frontend/get_themes"}) @@ -412,7 +427,7 @@ async def test_missing_themes(hass: HomeAssistant, ws_client) -> None: async def test_extra_js( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client_with_extra_js, + mock_http_client_with_extra_js: TestClient, ) -> None: """Test that extra javascript is loaded.""" @@ -497,7 +512,7 @@ async def test_extra_js( async def test_get_panels( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_http_client, + mock_http_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test get_panels command.""" @@ -547,7 +562,7 @@ async def test_get_panels( async def test_get_panels_non_admin( - hass: HomeAssistant, ws_client, hass_admin_user: MockUser + hass: HomeAssistant, ws_client: MockHAClientWebSocket, hass_admin_user: MockUser ) -> None: """Test get_panels command.""" hass_admin_user.groups = [] @@ -568,7 +583,7 @@ async def test_get_panels_non_admin( assert "map" not in msg["result"] -async def test_get_translations(hass: HomeAssistant, ws_client) -> None: +async def test_get_translations(ws_client: MockHAClientWebSocket) -> None: """Test get_translations command.""" with patch( "homeassistant.components.frontend.async_get_translations", @@ -593,7 +608,7 @@ async def test_get_translations(hass: HomeAssistant, ws_client) -> None: async def test_get_translations_for_integrations( - hass: HomeAssistant, ws_client + ws_client: MockHAClientWebSocket, ) -> None: """Test get_translations for integrations command.""" with patch( @@ -621,7 +636,7 
@@ async def test_get_translations_for_integrations( async def test_get_translations_for_single_integration( - hass: HomeAssistant, ws_client + ws_client: MockHAClientWebSocket, ) -> None: """Test get_translations for integration command.""" with patch( @@ -660,7 +675,7 @@ async def test_onboarding_load(hass: HomeAssistant) -> None: assert "onboarding" in frontend.dependencies -async def test_auth_authorize(mock_http_client) -> None: +async def test_auth_authorize(mock_http_client: TestClient) -> None: """Test the authorize endpoint works.""" resp = await mock_http_client.get( "/auth/authorize?response_type=code&client_id=https://localhost/&" @@ -683,7 +698,9 @@ async def test_auth_authorize(mock_http_client) -> None: assert "public" in resp.headers.get("cache-control") -async def test_get_version(hass: HomeAssistant, ws_client) -> None: +async def test_get_version( + hass: HomeAssistant, ws_client: MockHAClientWebSocket +) -> None: """Test get_version command.""" frontend = await async_get_integration(hass, "frontend") cur_version = next( @@ -701,7 +718,7 @@ async def test_get_version(hass: HomeAssistant, ws_client) -> None: assert msg["result"] == {"version": cur_version} -async def test_static_paths(hass: HomeAssistant, mock_http_client) -> None: +async def test_static_paths(mock_http_client: TestClient) -> None: """Test static paths.""" resp = await mock_http_client.get( "/.well-known/change-password", allow_redirects=False @@ -710,9 +727,8 @@ async def test_static_paths(hass: HomeAssistant, mock_http_client) -> None: assert resp.headers["location"] == "/profile" -async def test_manifest_json( - hass: HomeAssistant, frontend_themes, mock_http_client -) -> None: +@pytest.mark.usefixtures("frontend_themes") +async def test_manifest_json(hass: HomeAssistant, mock_http_client: TestClient) -> None: """Test for fetching manifest.json.""" resp = await mock_http_client.get("/manifest.json") assert resp.status == HTTPStatus.OK @@ -734,7 +750,7 @@ async def test_manifest_json( assert json["theme_color"] != DEFAULT_THEME_COLOR -async def test_static_path_cache(hass: HomeAssistant, mock_http_client) -> None: +async def test_static_path_cache(mock_http_client: TestClient) -> None: """Test static paths cache.""" resp = await mock_http_client.get("/lovelace/default_view", allow_redirects=False) assert resp.status == 404 @@ -766,7 +782,7 @@ async def test_static_path_cache(hass: HomeAssistant, mock_http_client) -> None: assert resp.status == 404 -async def test_get_icons(hass: HomeAssistant, ws_client: MockHAClientWebSocket) -> None: +async def test_get_icons(ws_client: MockHAClientWebSocket) -> None: """Test get_icons command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -787,9 +803,7 @@ async def test_get_icons(hass: HomeAssistant, ws_client: MockHAClientWebSocket) assert msg["result"] == {"resources": {}} -async def test_get_icons_for_integrations( - hass: HomeAssistant, ws_client: MockHAClientWebSocket -) -> None: +async def test_get_icons_for_integrations(ws_client: MockHAClientWebSocket) -> None: """Test get_icons for integrations command.""" with patch( "homeassistant.components.frontend.async_get_icons", @@ -814,7 +828,7 @@ async def test_get_icons_for_integrations( async def test_get_icons_for_single_integration( - hass: HomeAssistant, ws_client: MockHAClientWebSocket + ws_client: MockHAClientWebSocket, ) -> None: """Test get_icons for integration command.""" with patch( diff --git a/tests/components/frontend/test_storage.py 
b/tests/components/frontend/test_storage.py index 8b97fa9ee04..ce7f7aeb4a1 100644 --- a/tests/components/frontend/test_storage.py +++ b/tests/components/frontend/test_storage.py @@ -13,15 +13,13 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -def setup_frontend(hass): +def setup_frontend(hass: HomeAssistant) -> None: """Fixture to setup the frontend.""" hass.loop.run_until_complete(async_setup_component(hass, "frontend", {})) async def test_get_user_data_empty( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_user_data command.""" client = await hass_ws_client(hass) @@ -82,9 +80,7 @@ async def test_get_user_data( async def test_set_user_data_empty( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - hass_storage: dict[str, Any], + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test set_user_data command.""" client = await hass_ws_client(hass) diff --git a/tests/components/frontier_silicon/conftest.py b/tests/components/frontier_silicon/conftest.py index 2322740c69a..709b1842472 100644 --- a/tests/components/frontier_silicon/conftest.py +++ b/tests/components/frontier_silicon/conftest.py @@ -1,9 +1,9 @@ """Configuration for frontier_silicon tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.frontier_silicon.const import CONF_WEBFSAPI_URL, DOMAIN from homeassistant.const import CONF_PIN diff --git a/tests/components/fully_kiosk/conftest.py b/tests/components/fully_kiosk/conftest.py index 3f7c2985daf..028eefcf361 100644 --- a/tests/components/fully_kiosk/conftest.py +++ b/tests/components/fully_kiosk/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index de5dece776c..6a67ae75ec2 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -1,10 +1,10 @@ """Test helpers for FYTA.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN as FYTA_DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/fyta/test_diagnostics.py b/tests/components/fyta/test_diagnostics.py index 3a95b533489..cfaa5484b82 100644 --- a/tests/components/fyta/test_diagnostics.py +++ b/tests/components/fyta/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -28,4 +29,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/gardena_bluetooth/conftest.py b/tests/components/gardena_bluetooth/conftest.py index 08f698b4b67..882c9b1b090 100644 --- 
a/tests/components/gardena_bluetooth/conftest.py +++ b/tests/components/gardena_bluetooth/conftest.py @@ -1,6 +1,6 @@ """Common fixtures for the Gardena Bluetooth tests.""" -from collections.abc import Callable, Coroutine +from collections.abc import Callable, Coroutine, Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -10,7 +10,6 @@ from gardena_bluetooth.const import DeviceInformation from gardena_bluetooth.exceptions import CharacteristicNotFound from gardena_bluetooth.parse import Characteristic import pytest -from typing_extensions import Generator from homeassistant.components.gardena_bluetooth.const import DOMAIN from homeassistant.components.gardena_bluetooth.coordinator import SCAN_INTERVAL diff --git a/tests/components/gardena_bluetooth/snapshots/test_init.ambr b/tests/components/gardena_bluetooth/snapshots/test_init.ambr index 8cd77136f8f..71195918bb1 100644 --- a/tests/components/gardena_bluetooth/snapshots/test_init.ambr +++ b/tests/components/gardena_bluetooth/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': None, 'model': 'Mock Model', + 'model_id': None, 'name': 'Mock Title', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 92a9298cbd5..69e6cc6b696 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -1,7 +1,10 @@ """Test fixtures for the generic component.""" +from __future__ import annotations + +from collections.abc import Generator from io import BytesIO -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, _patch, patch from PIL import Image import pytest @@ -9,12 +12,14 @@ import respx from homeassistant import config_entries from homeassistant.components.generic.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(scope="package") -def fakeimgbytes_png(): +def fakeimgbytes_png() -> bytes: """Fake image in RAM for testing.""" buf = BytesIO() Image.new("RGB", (1, 1)).save(buf, format="PNG") @@ -22,7 +27,7 @@ def fakeimgbytes_png(): @pytest.fixture(scope="package") -def fakeimgbytes_jpg(): +def fakeimgbytes_jpg() -> bytes: """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. Image.new("RGB", (1, 1)).save(buf, format="jpeg") @@ -30,7 +35,7 @@ def fakeimgbytes_jpg(): @pytest.fixture(scope="package") -def fakeimgbytes_svg(): +def fakeimgbytes_svg() -> bytes: """Fake image in RAM for testing.""" return bytes( '', @@ -39,7 +44,7 @@ def fakeimgbytes_svg(): @pytest.fixture(scope="package") -def fakeimgbytes_gif(): +def fakeimgbytes_gif() -> bytes: """Fake image in RAM for testing.""" buf = BytesIO() # fake image in ram for testing. 
Image.new("RGB", (1, 1)).save(buf, format="gif") @@ -47,19 +52,27 @@ def fakeimgbytes_gif(): @pytest.fixture -def fakeimg_png(fakeimgbytes_png): +def fakeimg_png(fakeimgbytes_png: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_png + ) + yield + respx.pop("fake_img") @pytest.fixture -def fakeimg_gif(fakeimgbytes_gif): +def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_gif) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_gif + ) + yield + respx.pop("fake_img") @pytest.fixture(scope="package") -def mock_create_stream(): +def mock_create_stream() -> _patch[MagicMock]: """Mock create stream.""" mock_stream = Mock() mock_provider = Mock() @@ -75,7 +88,7 @@ def mock_create_stream(): @pytest.fixture -async def user_flow(hass): +async def user_flow(hass: HomeAssistant) -> ConfigFlowResult: """Initiate a user flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -87,7 +100,7 @@ async def user_flow(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass): +def config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -112,7 +125,9 @@ def config_entry_fixture(hass): @pytest.fixture -async def setup_entry(hass, config_entry): +async def setup_entry( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> MockConfigEntry: """Set up a config entry ready to be used in tests.""" await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/generic/test_camera.py b/tests/components/generic/test_camera.py index 72a7c32ba25..59ff513ccc9 100644 --- a/tests/components/generic/test_camera.py +++ b/tests/components/generic/test_camera.py @@ -73,7 +73,7 @@ async def help_setup_mock_config_entry( async def test_fetching_url( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, caplog: pytest.LogCaptureFixture, ) -> None: """Test that it fetches the given url.""" @@ -132,7 +132,7 @@ async def test_image_caching( hass: HomeAssistant, hass_client: ClientSessionGenerator, freezer: FrozenDateTimeFactory, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the image is cached and not fetched more often than the framerate indicates.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -197,7 +197,7 @@ async def test_image_caching( @respx.mock async def test_fetching_without_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes ) -> None: """Test that it fetches the given url when ssl verify is off.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -221,7 +221,7 @@ async def test_fetching_without_verify_ssl( @respx.mock async def test_fetching_url_with_verify_ssl( - hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_client: ClientSessionGenerator, fakeimgbytes_png: bytes ) -> None: """Test that it fetches the given url when 
ssl verify is explicitly on.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -247,8 +247,8 @@ async def test_fetching_url_with_verify_ssl( async def test_limit_refetch( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, - fakeimgbytes_jpg, + fakeimgbytes_png: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test that it fetches the given url.""" respx.get("http://example.com/0a").respond(stream=fakeimgbytes_png) @@ -319,7 +319,7 @@ async def test_stream_source( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the stream source is rendered.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -376,7 +376,7 @@ async def test_stream_source_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test that the stream source has an error.""" respx.get("http://example.com").respond(stream=fakeimgbytes_png) @@ -418,7 +418,7 @@ async def test_stream_source_error( @respx.mock async def test_setup_alternative_options( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, fakeimgbytes_png: bytes ) -> None: """Test that the stream source is setup with different config options.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -442,7 +442,7 @@ async def test_no_stream_source( hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, - fakeimgbytes_png, + fakeimgbytes_png: bytes, ) -> None: """Test a stream request without stream source option set.""" respx.get("https://example.com").respond(stream=fakeimgbytes_png) @@ -482,8 +482,8 @@ async def test_no_stream_source( async def test_camera_content_type( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_svg, - fakeimgbytes_jpg, + fakeimgbytes_svg: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test generic camera with custom content_type.""" urlsvg = "https://upload.wikimedia.org/wikipedia/commons/0/02/SVG_logo.svg" @@ -532,8 +532,8 @@ async def test_camera_content_type( async def test_timeout_cancelled( hass: HomeAssistant, hass_client: ClientSessionGenerator, - fakeimgbytes_png, - fakeimgbytes_jpg, + fakeimgbytes_png: bytes, + fakeimgbytes_jpg: bytes, ) -> None: """Test that timeouts and cancellations return last image.""" diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index 7e76d8f3891..e7af9383791 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -1,10 +1,13 @@ """Test The generic (IP Camera) config flow.""" +from __future__ import annotations + import contextlib import errno from http import HTTPStatus import os.path -from unittest.mock import AsyncMock, PropertyMock, patch +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, PropertyMock, _patch, patch import httpx import pytest @@ -27,7 +30,7 @@ from homeassistant.components.stream import ( CONF_USE_WALLCLOCK_AS_TIMESTAMPS, ) from homeassistant.components.stream.worker import StreamWorkerError -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult from homeassistant.const import ( CONF_AUTHENTICATION, CONF_NAME, @@ -38,6 +41,7 @@ from 
homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -67,10 +71,10 @@ TESTDATA_YAML = { @respx.mock async def test_form( hass: HomeAssistant, - fakeimgbytes_png, + fakeimgbytes_png: bytes, hass_client: ClientSessionGenerator, - user_flow, - mock_create_stream, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test the form with a normal set of settings.""" @@ -121,8 +125,9 @@ async def test_form( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_only_stillimage( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if the user wants still images only.""" result = await hass.config_entries.flow.async_init( @@ -163,7 +168,10 @@ async def test_form_only_stillimage( @respx.mock async def test_form_reject_still_preview( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream, user_flow + hass: HomeAssistant, + fakeimgbytes_png: bytes, + mock_create_stream: _patch[MagicMock], + user_flow: ConfigFlowResult, ) -> None: """Test we go back to the config screen if the user rejects the still preview.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -183,11 +191,11 @@ async def test_form_reject_still_preview( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_still_preview_cam_off( hass: HomeAssistant, - fakeimg_png, - mock_create_stream, - user_flow, + mock_create_stream: _patch[MagicMock], + user_flow: ConfigFlowResult, hass_client: ClientSessionGenerator, ) -> None: """Test camera errors are triggered during preview.""" @@ -212,8 +220,9 @@ async def test_form_still_preview_cam_off( @respx.mock +@pytest.mark.usefixtures("fakeimg_gif") async def test_form_only_stillimage_gif( - hass: HomeAssistant, fakeimg_gif, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if the user wants a gif.""" data = TESTDATA.copy() @@ -236,7 +245,7 @@ async def test_form_only_stillimage_gif( @respx.mock async def test_form_only_svg_whitespace( - hass: HomeAssistant, fakeimgbytes_svg, user_flow + hass: HomeAssistant, fakeimgbytes_svg: bytes, user_flow: ConfigFlowResult ) -> None: """Test we complete ok if svg starts with whitespace, issue #68889.""" fakeimgbytes_wspace_svg = bytes(" \n ", encoding="utf-8") + fakeimgbytes_svg @@ -270,12 +279,12 @@ async def test_form_only_svg_whitespace( ], ) async def test_form_only_still_sample( - hass: HomeAssistant, user_flow, image_file + hass: HomeAssistant, user_flow: ConfigFlowResult, image_file ) -> None: """Test various sample images #69037.""" image_path = os.path.join(os.path.dirname(__file__), image_file) - with open(image_path, "rb") as image: - respx.get("http://127.0.0.1/testurl/1").respond(stream=image.read()) + image_bytes = await hass.async_add_executor_job(Path(image_path).read_bytes) + respx.get("http://127.0.0.1/testurl/1").respond(stream=image_bytes) data = TESTDATA.copy() data.pop(CONF_STREAM_SOURCE) with patch("homeassistant.components.generic.async_setup_entry", return_value=True): @@ -332,8 +341,8 @@ async def test_form_only_still_sample( ) async def test_still_template( hass: HomeAssistant, - user_flow, - fakeimgbytes_png, + user_flow: ConfigFlowResult, + fakeimgbytes_png: bytes, template, url, 
expected_result, @@ -358,8 +367,11 @@ async def test_still_template( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_rtsp_mode( - hass: HomeAssistant, fakeimg_png, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we complete ok if the user enters a stream url.""" data = TESTDATA.copy() @@ -398,7 +410,10 @@ async def test_form_rtsp_mode( async def test_form_only_stream( - hass: HomeAssistant, fakeimgbytes_jpg, user_flow, mock_create_stream + hass: HomeAssistant, + fakeimgbytes_jpg: bytes, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we complete ok if the user wants stream only.""" data = TESTDATA.copy() @@ -434,7 +449,7 @@ async def test_form_only_stream( async def test_form_still_and_stream_not_provided( - hass: HomeAssistant, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we show a suitable error if neither still or stream URL are provided.""" result2 = await hass.config_entries.flow.async_configure( @@ -481,7 +496,11 @@ async def test_form_still_and_stream_not_provided( ], ) async def test_form_image_http_exceptions( - side_effect, expected_message, hass: HomeAssistant, user_flow, mock_create_stream + side_effect, + expected_message, + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle image http exceptions.""" respx.get("http://127.0.0.1/testurl/1").side_effect = [ @@ -501,7 +520,9 @@ async def test_form_image_http_exceptions( @respx.mock async def test_form_stream_invalidimage( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=b"invalid") @@ -518,7 +539,9 @@ async def test_form_stream_invalidimage( @respx.mock async def test_form_stream_invalidimage2( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=None) @@ -535,7 +558,9 @@ async def test_form_stream_invalidimage2( @respx.mock async def test_form_stream_invalidimage3( - hass: HomeAssistant, user_flow, mock_create_stream + hass: HomeAssistant, + user_flow: ConfigFlowResult, + mock_create_stream: _patch[MagicMock], ) -> None: """Test we handle invalid image when a stream is specified.""" respx.get("http://127.0.0.1/testurl/1").respond(content=bytes([0xFF])) @@ -551,7 +576,10 @@ async def test_form_stream_invalidimage3( @respx.mock -async def test_form_stream_timeout(hass: HomeAssistant, fakeimg_png, user_flow) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_stream_timeout( + hass: HomeAssistant, user_flow: ConfigFlowResult +) -> None: """Test we handle invalid auth.""" with patch( "homeassistant.components.generic.config_flow.create_stream" @@ -570,8 +598,49 @@ async def test_form_stream_timeout(hass: HomeAssistant, fakeimg_png, user_flow) @respx.mock +async def test_form_stream_not_set_up(hass: HomeAssistant, user_flow) -> None: + """Test we handle if stream has not been set up.""" + TESTDATA_ONLY_STREAM = TESTDATA.copy() + TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) + + with patch( + 
"homeassistant.components.generic.config_flow.create_stream", + side_effect=HomeAssistantError("Stream integration is not set up."), + ): + result1 = await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + TESTDATA_ONLY_STREAM, + ) + await hass.async_block_till_done() + + assert result1["type"] is FlowResultType.FORM + assert result1["errors"] == {"stream_source": "stream_not_set_up"} + + +@respx.mock +async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: + """Test the unknown error for streams.""" + TESTDATA_ONLY_STREAM = TESTDATA.copy() + TESTDATA_ONLY_STREAM.pop(CONF_STILL_IMAGE_URL) + + with ( + patch( + "homeassistant.components.generic.config_flow.create_stream", + side_effect=HomeAssistantError("Some other error."), + ), + pytest.raises(HomeAssistantError), + ): + await hass.config_entries.flow.async_configure( + user_flow["flow_id"], + TESTDATA_ONLY_STREAM, + ) + await hass.async_block_till_done() + + +@respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_worker_error( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle a StreamWorkerError and pass the message through.""" with patch( @@ -588,7 +657,7 @@ async def test_form_stream_worker_error( @respx.mock async def test_form_stream_permission_error( - hass: HomeAssistant, fakeimgbytes_png, user_flow + hass: HomeAssistant, fakeimgbytes_png: bytes, user_flow: ConfigFlowResult ) -> None: """Test we handle permission error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -605,8 +674,9 @@ async def test_form_stream_permission_error( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_no_route_to_host( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle no route to host.""" with patch( @@ -622,8 +692,9 @@ async def test_form_no_route_to_host( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_io_error( - hass: HomeAssistant, fakeimg_png, user_flow + hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: """Test we handle no io error when setting up stream.""" with patch( @@ -639,7 +710,8 @@ async def test_form_stream_io_error( @respx.mock -async def test_form_oserror(hass: HomeAssistant, fakeimg_png, user_flow) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_form_oserror(hass: HomeAssistant, user_flow: ConfigFlowResult) -> None: """Test we handle OS error when setting up stream.""" with ( patch( @@ -656,7 +728,7 @@ async def test_form_oserror(hass: HomeAssistant, fakeimg_png, user_flow) -> None @respx.mock async def test_options_template_error( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream + hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] ) -> None: """Test the options flow with a template error.""" respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) @@ -754,7 +826,7 @@ async def test_slug(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> No @respx.mock async def test_options_only_stream( - hass: HomeAssistant, fakeimgbytes_png, mock_create_stream + hass: HomeAssistant, fakeimgbytes_png: bytes, mock_create_stream: _patch[MagicMock] ) -> None: """Test the options flow without a still_image_url.""" respx.get("http://127.0.0.1/testurl/2").respond(stream=fakeimgbytes_png) @@ -791,7 +863,8 @@ async def test_options_only_stream( assert 
result3["data"][CONF_CONTENT_TYPE] == "image/jpeg" -async def test_unload_entry(hass: HomeAssistant, fakeimg_png) -> None: +@pytest.mark.usefixtures("fakeimg_png") +async def test_unload_entry(hass: HomeAssistant) -> None: """Test unloading the generic IP Camera entry.""" mock_entry = MockConfigEntry(domain=DOMAIN, options=TESTDATA) mock_entry.add_to_hass(hass) @@ -861,8 +934,9 @@ async def test_migrate_existing_ids( @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_use_wallclock_as_timestamps_option( - hass: HomeAssistant, fakeimg_png, mock_create_stream + hass: HomeAssistant, mock_create_stream: _patch[MagicMock] ) -> None: """Test the use_wallclock_as_timestamps option flow.""" diff --git a/tests/components/generic/test_diagnostics.py b/tests/components/generic/test_diagnostics.py index f68c3ba4bc6..80fa5fd4d4e 100644 --- a/tests/components/generic/test_diagnostics.py +++ b/tests/components/generic/test_diagnostics.py @@ -6,12 +6,15 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.generic.diagnostics import redact_url from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator, setup_entry + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + setup_entry: MockConfigEntry, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index eadc1b22527..fc46db48664 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -7,6 +7,9 @@ import pytest import voluptuous as vol from homeassistant.components import input_boolean, switch +from homeassistant.components.generic_hygrostat import ( + DOMAIN as GENERIC_HYDROSTAT_DOMAIN, +) from homeassistant.components.humidifier import ( ATTR_HUMIDITY, DOMAIN, @@ -26,17 +29,18 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HASS_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, State, callback, ) -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.common import ( + MockConfigEntry, assert_setup_component, async_fire_time_changed, mock_restore_cache, @@ -83,13 +87,14 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass): +async def setup_comp_1(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -async def test_humidifier_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: +@pytest.mark.usefixtures("setup_comp_1") +async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: """Test humidifier switching input_boolean.""" humidifier_switch = "input_boolean.test" assert await async_setup_component( @@ -128,8 +133,9 @@ async def test_humidifier_input_boolean(hass: HomeAssistant, setup_comp_1) -> No assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" +@pytest.mark.usefixtures("setup_comp_1") async def 
test_humidifier_switch( - hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, mock_switch_entities: list[MockSwitch] ) -> None: """Test humidifier switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -172,8 +178,9 @@ async def test_humidifier_switch( assert hass.states.get(ENTITY).attributes.get("action") == "humidifying" +@pytest.mark.usefixtures("setup_comp_1") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -205,7 +212,7 @@ def _setup_sensor(hass, humidity): @pytest.fixture -async def setup_comp_0(hass): +async def setup_comp_0(hass: HomeAssistant) -> None: """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) @@ -231,7 +238,7 @@ async def setup_comp_0(hass): @pytest.fixture -async def setup_comp_2(hass): +async def setup_comp_2(hass: HomeAssistant) -> None: """Initialize components.""" _setup_sensor(hass, 45) hass.states.async_set(ENT_SWITCH, STATE_OFF) @@ -303,7 +310,8 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE -async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_default_setup_params(hass: HomeAssistant) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -312,9 +320,8 @@ async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("action") == "idle" -async def test_default_setup_params_dehumidifier( - hass: HomeAssistant, setup_comp_0 -) -> None: +@pytest.mark.usefixtures("setup_comp_0") +async def test_default_setup_params_dehumidifier(hass: HomeAssistant) -> None: """Test the setup with default parameters for dehumidifier.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_humidity") == 0 @@ -323,14 +330,16 @@ async def test_default_setup_params_dehumidifier( assert state.attributes.get("action") == "idle" -async def test_get_modes(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_get_modes(hass: HomeAssistant) -> None: """Test that the attributes returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("available_modes") assert modes == [MODE_NORMAL, MODE_AWAY] -async def test_set_target_humidity(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_humidity(hass: HomeAssistant) -> None: """Test the setting of the target humidity.""" await hass.services.async_call( DOMAIN, @@ -353,7 +362,8 @@ async def test_set_target_humidity(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("humidity") == 40 -async def test_set_away_mode(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode(hass: HomeAssistant) -> None: """Test the setting away mode.""" await hass.services.async_call( DOMAIN, @@ -373,9 +383,8 @@ async def test_set_away_mode(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("humidity") == 35 -async def test_set_away_mode_and_restore_prev_humidity( - hass: HomeAssistant, setup_comp_2 -) -> None: 
+@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> None: """Test the setting and removing away mode. Verify original humidity is restored. @@ -407,8 +416,9 @@ async def test_set_away_mode_and_restore_prev_humidity( assert state.attributes.get("humidity") == 44 +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_humidity( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test the setting away mode twice in a row. @@ -448,7 +458,8 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( assert state.attributes.get("humidity") == 44 -async def test_sensor_affects_attribute(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_affects_attribute(hass: HomeAssistant) -> None: """Test that the sensor changes are reflected in the current_humidity attribute.""" state = hass.states.get(ENTITY) assert state.attributes.get("current_humidity") == 45 @@ -460,7 +471,8 @@ async def test_sensor_affects_attribute(hass: HomeAssistant, setup_comp_2) -> No assert state.attributes.get("current_humidity") == 47 -async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_bad_value(hass: HomeAssistant) -> None: """Test sensor that have None as state.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -470,8 +482,9 @@ async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: assert hass.states.get(ENTITY).state == STATE_UNAVAILABLE +@pytest.mark.usefixtures("setup_comp_2") async def test_sensor_bad_value_twice( - hass: HomeAssistant, setup_comp_2, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test sensor that the second bad value is not logged as warning.""" assert hass.states.get(ENTITY).state == STATE_ON @@ -499,9 +512,8 @@ async def test_sensor_bad_value_twice( ] == ["DEBUG"] -async def test_set_target_humidity_humidifier_on( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: """Test if target humidity turn humidifier on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 36) @@ -515,14 +527,13 @@ async def test_set_target_humidity_humidifier_on( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_humidity_humidifier_off( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: """Test if target humidity turn humidifier off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -536,13 +547,14 @@ async def test_set_target_humidity_humidifier_off( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_within_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn 
on within tolerance.""" calls = await _setup_switch(hass, False) @@ -558,8 +570,9 @@ async def test_humidity_change_humidifier_on_within_tolerance( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_on_outside_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier on outside dry tolerance.""" calls = await _setup_switch(hass, False) @@ -574,13 +587,14 @@ async def test_humidity_change_humidifier_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_within_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn off within tolerance.""" calls = await _setup_switch(hass, True) @@ -596,8 +610,9 @@ async def test_humidity_change_humidifier_off_within_tolerance( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_2") async def test_humidity_change_humidifier_off_outside_tolerance( - hass: HomeAssistant, setup_comp_2 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier off outside wet tolerance.""" calls = await _setup_switch(hass, True) @@ -612,12 +627,13 @@ async def test_humidity_change_humidifier_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_operation_mode_humidify(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_operation_mode_humidify(hass: HomeAssistant) -> None: """Test change mode from OFF to HUMIDIFY. Switch turns on when humidity below setpoint and mode changes. 
@@ -648,7 +664,7 @@ async def test_operation_mode_humidify(hass: HomeAssistant, setup_comp_2) -> Non await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH @@ -671,7 +687,7 @@ async def _setup_switch(hass, is_on): @pytest.fixture -async def setup_comp_3(hass): +async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -694,7 +710,8 @@ async def setup_comp_3(hass): await hass.async_block_till_done() -async def test_set_target_humidity_dry_off(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: """Test if target humidity turn dry off.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 50) @@ -708,13 +725,14 @@ async def test_set_target_humidity_dry_off(hass: HomeAssistant, setup_comp_3) -> await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "drying" -async def test_turn_away_mode_on_drying(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: """Test the setting away mode when drying.""" await _setup_switch(hass, True) _setup_sensor(hass, 50) @@ -737,7 +755,8 @@ async def test_turn_away_mode_on_drying(hass: HomeAssistant, setup_comp_3) -> No assert state.attributes.get("humidity") == 30 -async def test_operation_mode_dry(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_operation_mode_dry(hass: HomeAssistant) -> None: """Test change mode from OFF to DRY. Switch turns on when humidity below setpoint and state changes. 
@@ -765,38 +784,39 @@ async def test_operation_mode_dry(hass: HomeAssistant, setup_comp_3) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_humidity_dry_on(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_humidity_dry_on(hass: HomeAssistant) -> None: """Test if target humidity turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_init_ignores_tolerance(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_init_ignores_tolerance(hass: HomeAssistant) -> None: """Test if tolerance is ignored on initialization.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 39) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_humidity_change_dry_off_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_off_within_tolerance(hass: HomeAssistant) -> None: """Test if humidity change doesn't turn dry off within tolerance.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -805,8 +825,9 @@ async def test_humidity_change_dry_off_within_tolerance( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_3") async def test_set_humidity_change_dry_off_outside_tolerance( - hass: HomeAssistant, setup_comp_3 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry off.""" calls = await _setup_switch(hass, True) @@ -814,14 +835,13 @@ async def test_set_humidity_change_dry_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_humidity_change_dry_on_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_on_within_tolerance(hass: HomeAssistant) -> None: """Test if humidity change doesn't turn dry on within tolerance.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 37) @@ -830,23 +850,21 @@ async def test_humidity_change_dry_on_within_tolerance( assert len(calls) == 0 -async def test_humidity_change_dry_on_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_humidity_change_dry_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 45) await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def 
test_running_when_operating_mode_is_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is set False.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -860,15 +878,14 @@ async def test_running_when_operating_mode_is_off_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH assert hass.states.get(ENTITY).attributes.get("action") == "off" -async def test_no_state_change_when_operation_mode_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 30) @@ -887,7 +904,7 @@ async def test_no_state_change_when_operation_mode_off_2( @pytest.fixture -async def setup_comp_4(hass): +async def setup_comp_4(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -910,8 +927,9 @@ async def setup_comp_4(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_4 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, False) @@ -924,9 +942,8 @@ async def test_humidity_change_dry_trigger_on_not_long_enough( assert len(calls) == 0 -async def test_humidity_change_dry_trigger_on_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_humidity_change_dry_trigger_on_long_enough(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -939,13 +956,14 @@ async def test_humidity_change_dry_trigger_on_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_4") async def test_humidity_change_dry_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_4 + hass: HomeAssistant, ) -> None: """Test if humidity change turn dry on.""" calls = await _setup_switch(hass, True) @@ -958,9 +976,8 @@ async def test_humidity_change_dry_trigger_off_not_long_enough( assert len(calls) == 0 -async def test_humidity_change_dry_trigger_off_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_humidity_change_dry_trigger_off_long_enough(hass: HomeAssistant) -> None: """Test if humidity change turn dry on.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) with freeze_time(fake_changed): @@ -973,14 +990,13 @@ async def test_humidity_change_dry_trigger_off_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert 
call.data["entity_id"] == ENT_SWITCH -async def test_mode_change_dry_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) -> None: """Test if mode change turns dry off despite minimum cycle.""" calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) @@ -1000,9 +1016,8 @@ async def test_mode_change_dry_trigger_off_not_long_enough( assert call.data["entity_id"] == ENT_SWITCH -async def test_mode_change_dry_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_4 -) -> None: +@pytest.mark.usefixtures("setup_comp_4") +async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) -> None: """Test if mode change turns dry on despite minimum cycle.""" calls = await _setup_switch(hass, False) _setup_sensor(hass, 35) @@ -1032,7 +1047,7 @@ async def test_mode_change_dry_trigger_on_not_long_enough( @pytest.fixture -async def setup_comp_6(hass): +async def setup_comp_6(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1054,8 +1069,9 @@ async def setup_comp_6(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn humidifier off because of time.""" calls = await _setup_switch(hass, True) @@ -1068,8 +1084,9 @@ async def test_humidity_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change doesn't turn humidifier on because of time.""" calls = await _setup_switch(hass, False) @@ -1082,8 +1099,9 @@ async def test_humidity_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_on_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier on after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1097,13 +1115,14 @@ async def test_humidity_change_humidifier_trigger_on_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def test_humidity_change_humidifier_trigger_off_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if humidity change turn humidifier off after min cycle.""" fake_changed = datetime.datetime(1970, 11, 11, 11, 11, 11, tzinfo=datetime.UTC) @@ -1117,13 +1136,14 @@ async def test_humidity_change_humidifier_trigger_off_long_enough( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_off_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: 
"""Test if mode change turns humidifier off despite minimum cycle.""" calls = await _setup_switch(hass, True) @@ -1145,8 +1165,9 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_6") async def test_mode_change_humidifier_trigger_on_not_long_enough( - hass: HomeAssistant, setup_comp_6 + hass: HomeAssistant, ) -> None: """Test if mode change turns humidifier on despite minimum cycle.""" calls = await _setup_switch(hass, False) @@ -1182,7 +1203,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( @pytest.fixture -async def setup_comp_7(hass): +async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1206,8 +1227,9 @@ async def setup_comp_7(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_on_long_enough_3( - hass: HomeAssistant, setup_comp_7 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1221,13 +1243,14 @@ async def test_humidity_change_dry_trigger_on_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_7") async def test_humidity_change_dry_trigger_off_long_enough_3( - hass: HomeAssistant, setup_comp_7 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, False) @@ -1241,13 +1264,13 @@ async def test_humidity_change_dry_trigger_off_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass): +async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1270,8 +1293,9 @@ async def setup_comp_8(hass): await hass.async_block_till_done() +@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_on_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, True) @@ -1285,13 +1309,14 @@ async def test_humidity_change_humidifier_trigger_on_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_8") async def test_humidity_change_humidifier_trigger_off_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = await _setup_switch(hass, False) @@ -1305,7 +1330,7 @@ async def test_humidity_change_humidifier_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == 
ENT_SWITCH @@ -1360,7 +1385,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1702,8 +1727,9 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: assert state.state == STATE_OFF +@pytest.mark.usefixtures("setup_comp_1") async def test_sensor_stale_duration( - hass: HomeAssistant, setup_comp_1, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test turn off on sensor stale.""" @@ -1782,3 +1808,50 @@ async def test_sensor_stale_duration( # Not turning on by itself assert hass.states.get(humidifier_switch).state == STATE_OFF + + +async def test_device_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device.""" + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_HYDROSTAT_DOMAIN, + options={ + "device_class": "humidifier", + "dry_tolerance": 2.0, + "humidifier": "switch.test_source", + "name": "Test", + "target_sensor": ENT_SENSOR, + "wet_tolerance": 4.0, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_hygrostat/test_init.py b/tests/components/generic_hygrostat/test_init.py new file mode 100644 index 00000000000..bd4792f939d --- /dev/null +++ b/tests/components/generic_hygrostat/test_init.py @@ -0,0 +1,102 @@ +"""Test Generic Hygrostat component setup process.""" + +from __future__ import annotations + +from homeassistant.components.generic_hygrostat import ( + DOMAIN as GENERIC_HYDROSTAT_DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from .test_humidifier import ENT_SENSOR + +from tests.common import MockConfigEntry + + +async def test_device_cleaning( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleaning of devices linked to the helper config entry.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + 
"switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + # Configure the configuration entry for helper + helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_HYDROSTAT_DOMAIN, + options={ + "device_class": "humidifier", + "dry_tolerance": 2.0, + "humidifier": "switch.test_source", + "name": "Test", + "target_sensor": ENT_SENSOR, + "wet_tolerance": 4.0, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to config entry + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, 3 devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("humidifier.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 1ecde733f48..0f438056fbd 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -21,7 +21,7 @@ from homeassistant.components.climate import ( PRESET_SLEEP, HVACMode, ) -from homeassistant.components.generic_thermostat import ( +from homeassistant.components.generic_thermostat.const import ( DOMAIN as GENERIC_THERMOSTAT_DOMAIN, ) from homeassistant.const import ( @@ -37,19 +37,20 @@ from homeassistant.const import ( ) import homeassistant.core as ha from homeassistant.core import ( - DOMAIN as HASS_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, State, callback, ) from homeassistant.exceptions import ServiceValidationError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM from tests.common import ( + MockConfigEntry, assert_setup_component, 
async_fire_time_changed, async_mock_service, @@ -102,14 +103,15 @@ async def test_valid_conf(hass: HomeAssistant) -> None: @pytest.fixture -async def setup_comp_1(hass): +async def setup_comp_1(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() -async def test_heater_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: +@pytest.mark.usefixtures("setup_comp_1") +async def test_heater_input_boolean(hass: HomeAssistant) -> None: """Test heater switching input_boolean.""" heater_switch = "input_boolean.test" assert await async_setup_component( @@ -141,8 +143,9 @@ async def test_heater_input_boolean(hass: HomeAssistant, setup_comp_1) -> None: assert hass.states.get(heater_switch).state == STATE_ON +@pytest.mark.usefixtures("setup_comp_1") async def test_heater_switch( - hass: HomeAssistant, setup_comp_1, mock_switch_entities: list[MockSwitch] + hass: HomeAssistant, mock_switch_entities: list[MockSwitch] ) -> None: """Test heater switching test switch.""" setup_test_component_platform(hass, switch.DOMAIN, mock_switch_entities) @@ -177,8 +180,9 @@ async def test_heater_switch( assert hass.states.get(heater_switch).state == STATE_ON +@pytest.mark.usefixtures("setup_comp_1") async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp_1 + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test setting a unique ID.""" unique_id = "some_unique_id" @@ -210,7 +214,7 @@ def _setup_sensor(hass, temp): @pytest.fixture -async def setup_comp_2(hass): +async def setup_comp_2(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.units = METRIC_SYSTEM assert await async_setup_component( @@ -283,7 +287,8 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: assert hass.states.get(ENTITY).attributes["current_temperature"] == 18 -async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_default_setup_params(hass: HomeAssistant) -> None: """Test the setup with default parameters.""" state = hass.states.get(ENTITY) assert state.attributes.get("min_temp") == 7 @@ -292,14 +297,16 @@ async def test_default_setup_params(hass: HomeAssistant, setup_comp_2) -> None: assert state.attributes.get("target_temp_step") == 0.1 -async def test_get_hvac_modes(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_get_hvac_modes(hass: HomeAssistant) -> None: """Test that the operation list returns the correct modes.""" state = hass.states.get(ENTITY) modes = state.attributes.get("hvac_modes") assert modes == [HVACMode.HEAT, HVACMode.OFF] -async def test_set_target_temp(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp(hass: HomeAssistant) -> None: """Test the setting of the target temperature.""" await common.async_set_temperature(hass, 30) state = hass.states.get(ENTITY) @@ -322,7 +329,8 @@ async def test_set_target_temp(hass: HomeAssistant, setup_comp_2) -> None: (PRESET_ACTIVITY, 21), ], ) -async def test_set_away_mode(hass: HomeAssistant, setup_comp_2, preset, temp) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_away_mode(hass: HomeAssistant, preset, temp) -> None: """Test the setting away mode.""" await common.async_set_temperature(hass, 23) await 
common.async_set_preset_mode(hass, preset) @@ -342,8 +350,9 @@ async def test_set_away_mode(hass: HomeAssistant, setup_comp_2, preset, temp) -> (PRESET_ACTIVITY, 21), ], ) +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_and_restore_prev_temp( - hass: HomeAssistant, setup_comp_2, preset, temp + hass: HomeAssistant, preset, temp ) -> None: """Test the setting and removing away mode. @@ -370,8 +379,9 @@ async def test_set_away_mode_and_restore_prev_temp( (PRESET_ACTIVITY, 21), ], ) +@pytest.mark.usefixtures("setup_comp_2") async def test_set_away_mode_twice_and_restore_prev_temp( - hass: HomeAssistant, setup_comp_2, preset, temp + hass: HomeAssistant, preset, temp ) -> None: """Test the setting away mode twice in a row. @@ -387,7 +397,8 @@ async def test_set_away_mode_twice_and_restore_prev_temp( assert state.attributes.get("temperature") == 23 -async def test_set_preset_mode_invalid(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_preset_mode_invalid(hass: HomeAssistant) -> None: """Test an invalid mode raises an error and ignore case when checking modes.""" await common.async_set_temperature(hass, 23) await common.async_set_preset_mode(hass, "away") @@ -402,7 +413,8 @@ async def test_set_preset_mode_invalid(hass: HomeAssistant, setup_comp_2) -> Non assert state.attributes.get("preset_mode") == "none" -async def test_sensor_bad_value(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_sensor_bad_value(hass: HomeAssistant) -> None: """Test sensor that have None as state.""" state = hass.states.get(ENTITY) temp = state.attributes.get("current_temperature") @@ -463,7 +475,8 @@ async def test_sensor_unavailable(hass: HomeAssistant) -> None: assert state.attributes.get("current_temperature") is None -async def test_set_target_temp_heater_on(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp_heater_on(hass: HomeAssistant) -> None: """Test if target temperature turn heater on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 25) @@ -471,12 +484,13 @@ async def test_set_target_temp_heater_on(hass: HomeAssistant, setup_comp_2) -> N await common.async_set_temperature(hass, 30) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_temp_heater_off(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_set_target_temp_heater_off(hass: HomeAssistant) -> None: """Test if target temperature turn heater off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 30) @@ -484,14 +498,13 @@ async def test_set_target_temp_heater_off(hass: HomeAssistant, setup_comp_2) -> await common.async_set_temperature(hass, 25) assert len(calls) == 2 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_heater_on_within_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_on_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ 
-500,9 +513,8 @@ async def test_temp_change_heater_on_within_tolerance( assert len(calls) == 0 -async def test_temp_change_heater_on_outside_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn heater on outside cold tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -510,14 +522,13 @@ async def test_temp_change_heater_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_heater_off_within_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_off_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -526,9 +537,8 @@ async def test_temp_change_heater_off_within_tolerance( assert len(calls) == 0 -async def test_temp_change_heater_off_outside_tolerance( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_temp_change_heater_off_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn heater off outside hot tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -536,26 +546,26 @@ async def test_temp_change_heater_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_running_when_hvac_mode_is_off(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_running_when_hvac_mode_is_off(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_no_state_change_when_hvac_mode_off( - hass: HomeAssistant, setup_comp_2 -) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_no_state_change_when_hvac_mode_off(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -565,7 +575,8 @@ async def test_no_state_change_when_hvac_mode_off( assert len(calls) == 0 -async def test_hvac_mode_heat(hass: HomeAssistant, setup_comp_2) -> None: +@pytest.mark.usefixtures("setup_comp_2") +async def test_hvac_mode_heat(hass: HomeAssistant) -> None: """Test change mode from OFF to HEAT. Switch turns on when temp below setpoint and mode changes. 
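
Reviewer note: the bulk of this file's changes are mechanical — setup fixtures such as setup_comp_1/setup_comp_2, needed only for their side effects, move off the test signatures and are requested with @pytest.mark.usefixtures, and the HASS_DOMAIN alias is renamed to HOMEASSISTANT_DOMAIN. A minimal sketch of the fixture pattern, using hypothetical names (setup_comp_example, test_uses_fixture) rather than code from this patch:

import pytest

from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component


@pytest.fixture
async def setup_comp_example(hass: HomeAssistant) -> None:
    """Set up the core integration; the fixture exists only for this side effect."""
    assert await async_setup_component(hass, "homeassistant", {})
    await hass.async_block_till_done()


@pytest.mark.usefixtures("setup_comp_example")
async def test_uses_fixture(hass: HomeAssistant) -> None:
    """The fixture runs before the test without appearing as an unused argument."""
    assert hass.services.has_service("homeassistant", "turn_on")

Requesting the fixture through the marker keeps linters from flagging an unused parameter while preserving the ordering pytest guarantees for fixtures.
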
@@ -578,7 +589,7 @@ async def test_hvac_mode_heat(hass: HomeAssistant, setup_comp_2) -> None: await common.async_set_hvac_mode(hass, HVACMode.HEAT) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH @@ -600,7 +611,7 @@ def _setup_switch(hass, is_on): @pytest.fixture -async def setup_comp_3(hass): +async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -623,7 +634,8 @@ async def setup_comp_3(hass): await hass.async_block_till_done() -async def test_set_target_temp_ac_off(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_temp_ac_off(hass: HomeAssistant) -> None: """Test if target temperature turn ac off.""" calls = _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -631,12 +643,13 @@ async def test_set_target_temp_ac_off(hass: HomeAssistant, setup_comp_3) -> None await common.async_set_temperature(hass, 30) assert len(calls) == 2 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_turn_away_mode_on_cooling(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_turn_away_mode_on_cooling(hass: HomeAssistant) -> None: """Test the setting away mode when cooling.""" _setup_switch(hass, True) _setup_sensor(hass, 25) @@ -647,7 +660,8 @@ async def test_turn_away_mode_on_cooling(hass: HomeAssistant, setup_comp_3) -> N assert state.attributes.get("temperature") == 30 -async def test_hvac_mode_cool(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_hvac_mode_cool(hass: HomeAssistant) -> None: """Test change mode from OFF to COOL. Switch turns on when temp below setpoint and mode changes. 
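
Reviewer note: the repeated assertion rewrite from HASS_DOMAIN to HOMEASSISTANT_DOMAIN only renames the import alias of homeassistant.core.DOMAIN; the captured service calls are unchanged. A sketch of how a helper in the spirit of _setup_switch can capture the homeassistant.turn_on / homeassistant.turn_off calls these assertions inspect — an assumption for illustration, the helper actually defined in this file may differ:

from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON
from homeassistant.core import (
    DOMAIN as HOMEASSISTANT_DOMAIN,
    HomeAssistant,
    ServiceCall,
    callback,
)

ENT_SWITCH = "switch.test"  # hypothetical heater switch entity id


def setup_mock_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]:
    """Fake the heater switch state and record turn_on/turn_off service calls."""
    hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF)
    calls: list[ServiceCall] = []

    @callback
    def log_call(call: ServiceCall) -> None:
        calls.append(call)

    hass.services.async_register(HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON, log_call)
    hass.services.async_register(HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF, log_call)
    return calls

With such a helper, assertions like call.domain == HOMEASSISTANT_DOMAIN and call.service == SERVICE_TURN_ON inspect the recorded ServiceCall objects directly.
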
@@ -660,12 +674,13 @@ async def test_hvac_mode_cool(hass: HomeAssistant, setup_comp_3) -> None: await common.async_set_hvac_mode(hass, HVACMode.COOL) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_set_target_temp_ac_on(hass: HomeAssistant, setup_comp_3) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_target_temp_ac_on(hass: HomeAssistant) -> None: """Test if target temperature turn ac on.""" calls = _setup_switch(hass, False) _setup_sensor(hass, 30) @@ -673,14 +688,13 @@ async def test_set_target_temp_ac_on(hass: HomeAssistant, setup_comp_3) -> None: await common.async_set_temperature(hass, 25) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_ac_off_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_off_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn ac off within tolerance.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -689,9 +703,8 @@ async def test_temp_change_ac_off_within_tolerance( assert len(calls) == 0 -async def test_set_temp_change_ac_off_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_set_temp_change_ac_off_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn ac off.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) @@ -699,14 +712,13 @@ async def test_set_temp_change_ac_off_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_ac_on_within_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_on_within_tolerance(hass: HomeAssistant) -> None: """Test if temperature change doesn't turn ac on within tolerance.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -715,9 +727,8 @@ async def test_temp_change_ac_on_within_tolerance( assert len(calls) == 0 -async def test_temp_change_ac_on_outside_tolerance( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_temp_change_ac_on_outside_tolerance(hass: HomeAssistant) -> None: """Test if temperature change turn ac on.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 25) @@ -725,28 +736,26 @@ async def test_temp_change_ac_on_outside_tolerance( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_running_when_operating_mode_is_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None: """Test that the switch turns off when enabled is 
set False.""" calls = _setup_switch(hass, True) await common.async_set_temperature(hass, 30) await common.async_set_hvac_mode(hass, HVACMode.OFF) assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH -async def test_no_state_change_when_operation_mode_off_2( - hass: HomeAssistant, setup_comp_3 -) -> None: +@pytest.mark.usefixtures("setup_comp_3") +async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> None: """Test that the switch doesn't turn on when enabled is False.""" calls = _setup_switch(hass, False) await common.async_set_temperature(hass, 30) @@ -858,7 +867,7 @@ async def test_heating_cooling_switch_toggles_when_outside_min_cycle_duration( # Then assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == expected_triggered_service_call assert call.data["entity_id"] == ENT_SWITCH @@ -911,7 +920,7 @@ async def test_hvac_mode_change_toggles_heating_cooling_switch_even_when_within_ @pytest.fixture -async def setup_comp_7(hass): +async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -937,9 +946,8 @@ async def setup_comp_7(hass): await hass.async_block_till_done() -async def test_temp_change_ac_trigger_on_long_enough_3( - hass: HomeAssistant, setup_comp_7 -) -> None: +@pytest.mark.usefixtures("setup_comp_7") +async def test_temp_change_ac_trigger_on_long_enough_3(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -957,14 +965,13 @@ async def test_temp_change_ac_trigger_on_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH -async def test_temp_change_ac_trigger_off_long_enough_3( - hass: HomeAssistant, setup_comp_7 -) -> None: +@pytest.mark.usefixtures("setup_comp_7") +async def test_temp_change_ac_trigger_off_long_enough_3(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) await hass.async_block_till_done() @@ -982,13 +989,13 @@ async def test_temp_change_ac_trigger_off_long_enough_3( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_8(hass): +async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( @@ -1012,9 +1019,8 @@ async def setup_comp_8(hass): await hass.async_block_till_done() -async def test_temp_change_heater_trigger_on_long_enough_2( - hass: HomeAssistant, setup_comp_8 -) -> None: +@pytest.mark.usefixtures("setup_comp_8") +async def test_temp_change_heater_trigger_on_long_enough_2(hass: HomeAssistant) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, True) await hass.async_block_till_done() @@ -1032,13 +1038,14 @@ async def 
test_temp_change_heater_trigger_on_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_ON assert call.data["entity_id"] == ENT_SWITCH +@pytest.mark.usefixtures("setup_comp_8") async def test_temp_change_heater_trigger_off_long_enough_2( - hass: HomeAssistant, setup_comp_8 + hass: HomeAssistant, ) -> None: """Test if turn on signal is sent at keep-alive intervals.""" calls = _setup_switch(hass, False) @@ -1057,13 +1064,13 @@ async def test_temp_change_heater_trigger_off_long_enough_2( await hass.async_block_till_done() assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @pytest.fixture -async def setup_comp_9(hass): +async def setup_comp_9(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, @@ -1086,12 +1093,13 @@ async def setup_comp_9(hass): await hass.async_block_till_done() -async def test_precision(hass: HomeAssistant, setup_comp_9) -> None: +@pytest.mark.usefixtures("setup_comp_9") +async def test_precision(hass: HomeAssistant) -> None: """Test that setting precision to tenths works as intended.""" hass.config.units = US_CUSTOMARY_SYSTEM - await common.async_set_temperature(hass, 23.27) + await common.async_set_temperature(hass, 55.27) state = hass.states.get(ENTITY) - assert state.attributes.get("temperature") == 23.3 + assert state.attributes.get("temperature") == 55.3 # check that target_temp_step defaults to precision assert state.attributes.get("target_temp_step") == 0.1 @@ -1229,7 +1237,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: # heater must be switched off assert len(calls) == 1 call = calls[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == ENT_SWITCH @@ -1337,7 +1345,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> assert len(calls_on) == 0 assert len(calls_off) == 1 call = calls_off[0] - assert call.domain == HASS_DOMAIN + assert call.domain == HOMEASSISTANT_DOMAIN assert call.service == SERVICE_TURN_OFF assert call.data["entity_id"] == "input_boolean.test" @@ -1431,3 +1439,50 @@ async def test_reload(hass: HomeAssistant) -> None: assert len(hass.states.async_all()) == 1 assert hass.states.get("climate.test") is None assert hass.states.get("climate.reload") + + +async def test_device_id( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device.""" + + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + helper_config_entry = MockConfigEntry( + data={}, + domain=GENERIC_THERMOSTAT_DOMAIN, + options={ + "name": "Test", + "heater": "switch.test_source", + 
"target_sensor": ENT_SENSOR, + "ac_mode": False, + "cold_tolerance": 0.3, + "hot_tolerance": 0.3, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id diff --git a/tests/components/generic_thermostat/test_config_flow.py b/tests/components/generic_thermostat/test_config_flow.py index 81e06146a14..7a7fdabc6e6 100644 --- a/tests/components/generic_thermostat/test_config_flow.py +++ b/tests/components/generic_thermostat/test_config_flow.py @@ -6,12 +6,11 @@ from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import PRESET_AWAY -from homeassistant.components.generic_thermostat.climate import ( +from homeassistant.components.generic_thermostat.const import ( CONF_AC_MODE, CONF_COLD_TOLERANCE, CONF_HEATER, CONF_HOT_TOLERANCE, - CONF_NAME, CONF_PRESETS, CONF_SENSOR, DOMAIN, @@ -21,6 +20,7 @@ from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, + CONF_NAME, STATE_OFF, UnitOfTemperature, ) diff --git a/tests/components/generic_thermostat/test_init.py b/tests/components/generic_thermostat/test_init.py new file mode 100644 index 00000000000..addae2f684e --- /dev/null +++ b/tests/components/generic_thermostat/test_init.py @@ -0,0 +1,98 @@ +"""Test Generic Thermostat component setup process.""" + +from __future__ import annotations + +from homeassistant.components.generic_thermostat.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_device_cleaning( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleaning of devices linked to the helper config entry.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("switch", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "switch", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("switch.test_source") is not None + + # Configure the configuration entry for helper + helper_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "Test", + "heater": "switch.test_source", + "target_sensor": "sensor.temperature", + "ac_mode": False, + "cold_tolerance": 0.3, + "hot_tolerance": 0.3, + }, + title="Test", + ) + helper_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to config 
entry + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=helper_config_entry.entry_id, + identifiers={("sensor", "identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, 3 devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(helper_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the helper entity + helper_entity = entity_registry.async_get("climate.test") + assert helper_entity is not None + assert helper_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + helper_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/geniushub/__init__.py b/tests/components/geniushub/__init__.py new file mode 100644 index 00000000000..15886486e38 --- /dev/null +++ b/tests/components/geniushub/__init__.py @@ -0,0 +1 @@ +"""Tests for the geniushub integration.""" diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py new file mode 100644 index 00000000000..125f1cfa80c --- /dev/null +++ b/tests/components/geniushub/conftest.py @@ -0,0 +1,65 @@ +"""GeniusHub tests configuration.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +from homeassistant.components.geniushub.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME + +from tests.common import MockConfigEntry +from tests.components.smhi.common import AsyncMock + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.geniushub.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_geniushub_client() -> Generator[AsyncMock]: + """Mock a GeniusHub client.""" + with patch( + "homeassistant.components.geniushub.config_flow.GeniusService", + autospec=True, + ) as mock_client: + client = mock_client.return_value + client.request.return_value = { + "data": { + "UID": "aa:bb:cc:dd:ee:ff", + } + } + yield client + + +@pytest.fixture +def mock_local_config_entry() -> MockConfigEntry: + """Mock a local config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="aa:bb:cc:dd:ee:ff", + data={ + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +def mock_cloud_config_entry() -> MockConfigEntry: + """Mock a cloud config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Genius hub", + data={ + CONF_TOKEN: "abcdef", + }, + ) diff --git a/tests/components/geniushub/test_config_flow.py b/tests/components/geniushub/test_config_flow.py new file mode 100644 index 00000000000..9234e03e35a --- /dev/null +++ b/tests/components/geniushub/test_config_flow.py @@ -0,0 
+1,482 @@ +"""Test the Geniushub config flow.""" + +from http import HTTPStatus +import socket +from typing import Any +from unittest.mock import AsyncMock + +from aiohttp import ClientConnectionError, ClientResponseError +import pytest + +from homeassistant.components.geniushub import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import ( + CONF_HOST, + CONF_MAC, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_local_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, +) -> None: + """Test full local flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "10.0.0.130" + assert result["data"] == { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + } + assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_local_flow_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test local flow exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + mock_geniushub_client.request.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_geniushub_client.request.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_local_duplicate_data( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, +) -> None: + """Test local flow aborts on duplicate data.""" + 
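
Reviewer note: test_local_duplicate_data and test_local_duplicate_mac both feed input that collides with the already-added mock_local_config_entry — either by matching its stored data or by resolving to the same hub MAC, since the mocked client always reports aa:bb:cc:dd:ee:ff — and assert the flow aborts with "already_configured". A sketch of the config-flow pattern that typically produces these aborts in Home Assistant, shown with a hypothetical domain and a stubbed validation helper; it is not necessarily the exact geniushub implementation:

from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlowResult


class ExampleHubConfigFlow(config_entries.ConfigFlow, domain="example_hub"):
    """Hypothetical flow showing the duplicate-entry handling."""

    async def _fetch_mac(self, user_input: dict) -> str:
        """Hypothetical stand-in for querying the hub for its MAC/UID."""
        return "aa:bb:cc:dd:ee:ff"

    async def async_step_local_api(self, user_input=None) -> ConfigFlowResult:
        if user_input is not None:
            # Abort when an entry with identical connection data already exists.
            self._async_abort_entries_match(user_input)
            # Abort when another entry already claims this hub's MAC.
            mac = await self._fetch_mac(user_input)
            await self.async_set_unique_id(mac)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(title=user_input["host"], data=user_input)
        return self.async_show_form(step_id="local_api")

Either guard ends the flow with the "already_configured" abort asserted in the tests above.
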
mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_local_duplicate_mac( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, +) -> None: + """Test local flow aborts on duplicate MAC.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "local_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "local_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username1", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_full_cloud_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, +) -> None: + """Test full cloud flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Genius hub" + assert result["data"] == { + CONF_TOKEN: "abcdef", + } + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_cloud_flow_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test cloud flow exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + mock_geniushub_client.request.side_effect = 
exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_geniushub_client.request.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_cloud_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_cloud_config_entry: MockConfigEntry, +) -> None: + """Test cloud flow aborts on duplicate data.""" + mock_cloud_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "cloud_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_local_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full local import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "10.0.0.130" + assert result["data"] == data + assert result["result"].unique_id == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +async def test_import_cloud_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], +) -> None: + """Test full cloud import flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Genius hub" + assert result["data"] == data + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + { + CONF_TOKEN: "abcdef", + }, + { + CONF_TOKEN: "abcdef", + CONF_MAC: "aa:bb:cc:dd:ee:ff", + }, + ], +) +@pytest.mark.parametrize( + ("exception", "reason"), + [ + (socket.gaierror, "invalid_host"), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.UNAUTHORIZED), + "invalid_auth", + ), + ( + ClientResponseError(AsyncMock(), (), status=HTTPStatus.NOT_FOUND), + "invalid_host", + ), + (TimeoutError, "cannot_connect"), + (ClientConnectionError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_import_flow_exceptions( + hass: 
HomeAssistant, + mock_geniushub_client: AsyncMock, + data: dict[str, Any], + exception: Exception, + reason: str, +) -> None: + """Test import flow exceptions.""" + mock_geniushub_client.request.side_effect = exception + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == reason + + +@pytest.mark.parametrize( + ("data"), + [ + { + CONF_HOST: "10.0.0.130", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + { + CONF_HOST: "10.0.0.131", + CONF_USERNAME: "test-username1", + CONF_PASSWORD: "test-password", + }, + ], +) +async def test_import_flow_local_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_local_config_entry: MockConfigEntry, + data: dict[str, Any], +) -> None: + """Test import flow aborts on local duplicate data.""" + mock_local_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=data, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow_cloud_duplicate( + hass: HomeAssistant, + mock_geniushub_client: AsyncMock, + mock_cloud_config_entry: MockConfigEntry, +) -> None: + """Test import flow aborts on cloud duplicate data.""" + mock_cloud_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_TOKEN: "abcdef", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/geo_json_events/conftest.py b/tests/components/geo_json_events/conftest.py index beab7bf1403..11928e6f012 100644 --- a/tests/components/geo_json_events/conftest.py +++ b/tests/components/geo_json_events/conftest.py @@ -1,9 +1,9 @@ """Configuration for GeoJSON Events tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.geo_json_events import DOMAIN from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_URL diff --git a/tests/components/geo_location/test_trigger.py b/tests/components/geo_location/test_trigger.py index e5fb93dcf8f..7673f357a08 100644 --- a/tests/components/geo_location/test_trigger.py +++ b/tests/components/geo_location/test_trigger.py @@ -29,7 +29,7 @@ def calls(hass: HomeAssistant) -> list[ServiceCall]: @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -49,7 +49,7 @@ def setup_comp(hass): async def test_if_fires_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -96,10 +96,10 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "geo_location - geo_location.entity - hello - hello - test - 0" ) @@ -118,6 +118,8 @@ async def test_if_fires_on_zone_enter( blocking=True, ) + assert len(service_calls) == 2 + 
hass.states.async_set( "geo_location.entity", "hello", @@ -125,11 +127,11 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -162,11 +164,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -199,11 +201,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_zone_leave_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave for unavailable entity.""" hass.states.async_set( @@ -236,11 +238,11 @@ async def test_if_fires_on_zone_leave_2( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -273,11 +275,11 @@ async def test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_appear( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -317,15 +319,16 @@ async def test_if_fires_on_zone_appear( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] == "geo_location - geo_location.entity - - hello - test" + service_calls[0].data["some"] + == "geo_location - geo_location.entity - - hello - test" ) async def test_if_fires_on_zone_appear_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity appears in zone.""" assert await async_setup_component( @@ -373,16 +376,16 @@ async def test_if_fires_on_zone_appear_2( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "geo_location - geo_location.entity - goodbye - hello - test" ) async def test_if_fires_on_zone_disappear( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if entity disappears from zone.""" hass.states.async_set( @@ -423,14 +426,17 @@ async def test_if_fires_on_zone_disappear( hass.states.async_remove("geo_location.entity") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 
assert ( - calls[0].data["some"] == "geo_location - geo_location.entity - hello - - test" + service_calls[0].data["some"] + == "geo_location - geo_location.entity - hello - - test" ) async def test_zone_undefined( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test for undefined zone.""" hass.states.async_set( @@ -466,7 +472,7 @@ async def test_zone_undefined( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( f"Unable to execute automation automation 0: Zone {zone_does_not_exist} not found" diff --git a/tests/components/geocaching/conftest.py b/tests/components/geocaching/conftest.py index 155cd2c5a7e..28d87176e46 100644 --- a/tests/components/geocaching/conftest.py +++ b/tests/components/geocaching/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from geocachingapi import GeocachingStatus import pytest -from typing_extensions import Generator from homeassistant.components.geocaching.const import DOMAIN diff --git a/tests/components/geofency/test_init.py b/tests/components/geofency/test_init.py index 2228cea80ee..3a98c6480bd 100644 --- a/tests/components/geofency/test_init.py +++ b/tests/components/geofency/test_init.py @@ -137,7 +137,7 @@ async def geofency_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -155,7 +155,7 @@ async def setup_zones(hass): @pytest.fixture -async def webhook_id(hass, geofency_client): +async def webhook_id(hass: HomeAssistant) -> str: """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -173,7 +173,7 @@ async def webhook_id(hass, geofency_client): return result["result"].data["webhook_id"] -async def test_data_validation(geofency_client, webhook_id) -> None: +async def test_data_validation(geofency_client: TestClient, webhook_id: str) -> None: """Test data validation.""" url = f"/api/webhook/{webhook_id}" @@ -195,8 +195,8 @@ async def test_gps_enter_and_exit_home( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - geofency_client, - webhook_id, + geofency_client: TestClient, + webhook_id: str, ) -> None: """Test GPS based zone enter and exit.""" url = f"/api/webhook/{webhook_id}" @@ -240,7 +240,7 @@ async def test_gps_enter_and_exit_home( async def test_beacon_enter_and_exit_home( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test iBeacon based zone enter and exit - a.k.a stationary iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -263,7 +263,7 @@ async def test_beacon_enter_and_exit_home( async def test_beacon_enter_and_exit_car( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test use of mobile iBeacon.""" url = f"/api/webhook/{webhook_id}" @@ -305,7 +305,7 @@ async def test_beacon_enter_and_exit_car( async def test_load_unload_entry( - hass: HomeAssistant, geofency_client, webhook_id + hass: HomeAssistant, geofency_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = 
f"/api/webhook/{webhook_id}" diff --git a/tests/components/gios/test_diagnostics.py b/tests/components/gios/test_diagnostics.py index 903de4872a2..a965e5550df 100644 --- a/tests/components/gios/test_diagnostics.py +++ b/tests/components/gios/test_diagnostics.py @@ -1,6 +1,7 @@ """Test GIOS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -18,4 +19,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry = await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/github/conftest.py b/tests/components/github/conftest.py index df7de604c2c..ab262f3f522 100644 --- a/tests/components/github/conftest.py +++ b/tests/components/github/conftest.py @@ -1,9 +1,9 @@ """conftest for the GitHub integration.""" +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN diff --git a/tests/components/goodwe/test_diagnostics.py b/tests/components/goodwe/test_diagnostics.py index 21917265811..0a997edc594 100644 --- a/tests/components/goodwe/test_diagnostics.py +++ b/tests/components/goodwe/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock, patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.goodwe import CONF_MODEL_FAMILY, DOMAIN from homeassistant.const import CONF_HOST @@ -32,4 +33,4 @@ async def test_entry_diagnostics( assert await async_setup_component(hass, DOMAIN, {}) result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/google/conftest.py b/tests/components/google/conftest.py index 26a32a64b21..791e5613b0b 100644 --- a/tests/components/google/conftest.py +++ b/tests/components/google/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import AsyncGenerator, Awaitable, Callable, Generator import datetime import http import time @@ -13,7 +13,6 @@ from aiohttp.client_exceptions import ClientError from gcal_sync.auth import API_BASE_URL from oauth2client.client import OAuth2Credentials import pytest -from typing_extensions import AsyncGenerator, Generator import yaml from homeassistant.components.application_credentials import ( @@ -294,7 +293,7 @@ def mock_calendars_list( @pytest.fixture def mock_calendar_get( aioclient_mock: AiohttpClientMocker, -) -> Callable[[...], None]: +) -> Callable[..., None]: """Fixture for returning a calendar get response.""" def _result( @@ -316,7 +315,7 @@ def mock_calendar_get( @pytest.fixture def mock_insert_event( aioclient_mock: AiohttpClientMocker, -) -> Callable[[...], None]: +) -> Callable[..., None]: """Fixture for capturing event creation.""" def _expect_result( @@ -331,7 +330,7 @@ def mock_insert_event( @pytest.fixture(autouse=True) -async def set_time_zone(hass): +async def set_time_zone(hass: HomeAssistant) -> None: """Set the time zone for the tests.""" # Set our timezone to CST/Regina so we can check calculations # This keeps UTC-6 all 
year round diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 5fe26585fe5..903b68a5cf2 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -839,7 +839,7 @@ async def test_websocket_create( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -881,7 +881,7 @@ async def test_websocket_create_all_day( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1078,7 +1078,7 @@ async def test_readonly_websocket_create( hass: HomeAssistant, component_setup: ComponentSetup, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, @@ -1129,7 +1129,7 @@ async def test_readonly_search_calendar( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_yaml, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, ws_client: ClientFixture, diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index 12281f6d348..47156299b57 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -116,7 +116,7 @@ async def primary_calendar_status() -> HTTPStatus | None: @pytest.fixture(autouse=True) async def primary_calendar( - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], primary_calendar_error: ClientError | None, primary_calendar_status: HTTPStatus | None, primary_calendar_email: str, diff --git a/tests/components/google/test_diagnostics.py b/tests/components/google/test_diagnostics.py index 5d6259309b8..78eb6d7ceea 100644 --- a/tests/components/google/test_diagnostics.py +++ b/tests/components/google/test_diagnostics.py @@ -11,7 +11,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .conftest import TEST_EVENT, ApiResult, ComponentSetup @@ -55,12 +54,6 @@ def _get_test_client_generator( return auth_client -@pytest.fixture(autouse=True) -async def setup_diag(hass): - """Set up diagnostics platform.""" - assert await async_setup_component(hass, "diagnostics", {}) - - @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async def test_diagnostics( diff --git a/tests/components/google/test_init.py b/tests/components/google/test_init.py index de5e2ea9145..cfcda18df3a 100644 --- a/tests/components/google/test_init.py +++ b/tests/components/google/test_init.py @@ -82,7 +82,7 @@ def assert_state(actual: State | None, expected: State | None) -> None: def add_event_call_service( hass: HomeAssistant, request: pytest.FixtureRequest, -) -> Callable[dict[str, Any], Awaitable[None]]: +) -> Callable[[dict[str, Any]], Awaitable[None]]: """Fixture for calling the 
add or create event service.""" (domain, service_call, data, target) = request.param @@ -422,7 +422,7 @@ async def test_add_event_invalid_params( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], date_fields: dict[str, Any], expected_error: type[Exception], error_match: str | None, @@ -457,14 +457,14 @@ async def test_add_event_date_in_x( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[..., dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, date_fields: dict[str, Any], start_timedelta: datetime.timedelta, end_timedelta: datetime.timedelta, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that adds an event with various time ranges.""" @@ -496,10 +496,10 @@ async def test_add_event_date( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that sets a date range.""" @@ -535,11 +535,11 @@ async def test_add_event_date_time( hass: HomeAssistant, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that adds an event with a date time range.""" @@ -599,7 +599,7 @@ async def test_unsupported_create_event( mock_calendars_yaml: Mock, component_setup: ComponentSetup, mock_calendars_list: ApiResult, - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], test_api_calendar: dict[str, Any], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, @@ -636,8 +636,8 @@ async def test_add_event_failure( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_insert_event: Callable[[..., dict[str, Any]], None], - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + mock_insert_event: Callable[..., None], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service calls with incorrect fields.""" @@ -661,10 +661,10 @@ async def test_add_event_location( component_setup: ComponentSetup, mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], - mock_insert_event: Callable[[str, dict[str, Any]], None], + mock_insert_event: Callable[..., None], mock_events_list: ApiResult, aioclient_mock: AiohttpClientMocker, - add_event_call_service: Callable[dict[str, Any], Awaitable[None]], + add_event_call_service: Callable[[dict[str, Any]], Awaitable[None]], ) -> None: """Test service call that sets a location 
field.""" @@ -879,7 +879,7 @@ async def test_assign_unique_id( mock_calendars_list: ApiResult, test_api_calendar: dict[str, Any], mock_events_list: ApiResult, - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], config_entry: MockConfigEntry, ) -> None: """Test an existing config is updated to have unique id if it does not exist.""" @@ -918,7 +918,7 @@ async def test_assign_unique_id_failure( test_api_calendar: dict[str, Any], config_entry: MockConfigEntry, mock_events_list: ApiResult, - mock_calendar_get: Callable[[...], None], + mock_calendar_get: Callable[..., None], request_status: http.HTTPStatus, config_entry_status: ConfigEntryState, ) -> None: diff --git a/tests/components/google_assistant/test_diagnostics.py b/tests/components/google_assistant/test_diagnostics.py index 26d91ce7920..1d68079563c 100644 --- a/tests/components/google_assistant/test_diagnostics.py +++ b/tests/components/google_assistant/test_diagnostics.py @@ -50,4 +50,4 @@ async def test_diagnostics( config_entry = hass.config_entries.async_entries("google_assistant")[0] assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/google_assistant/test_http.py b/tests/components/google_assistant/test_http.py index b041f69828f..273aac1559e 100644 --- a/tests/components/google_assistant/test_http.py +++ b/tests/components/google_assistant/test_http.py @@ -4,6 +4,7 @@ from datetime import UTC, datetime, timedelta from http import HTTPStatus import json import os +from pathlib import Path from typing import Any from unittest.mock import ANY, patch from uuid import uuid4 @@ -655,9 +656,7 @@ async def test_async_get_users( ) path = hass.config.config_dir / ".storage" / GoogleConfigStore._STORAGE_KEY os.makedirs(os.path.dirname(path), exist_ok=True) - with open(path, "w", encoding="utf8") as f: - f.write(store_data) - + await hass.async_add_executor_job(Path(path).write_text, store_data) assert await async_get_users(hass) == expected_users await hass.async_stop() diff --git a/tests/components/google_assistant/test_smart_home.py b/tests/components/google_assistant/test_smart_home.py index 2eeb3d16b81..ea8f6957e38 100644 --- a/tests/components/google_assistant/test_smart_home.py +++ b/tests/components/google_assistant/test_smart_home.py @@ -25,11 +25,12 @@ from homeassistant.components.google_assistant import ( from homeassistant.config import async_process_ha_core_config from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, + EVENT_CALL_SERVICE, Platform, UnitOfTemperature, __version__, ) -from homeassistant.core import EVENT_CALL_SERVICE, HomeAssistant, State +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import ( area_registry as ar, device_registry as dr, diff --git a/tests/components/google_assistant/test_trait.py b/tests/components/google_assistant/test_trait.py index 63a34c01dac..54aa4035670 100644 --- a/tests/components/google_assistant/test_trait.py +++ b/tests/components/google_assistant/test_trait.py @@ -60,6 +60,7 @@ from homeassistant.const import ( ATTR_MODE, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, + EVENT_CALL_SERVICE, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_ALARM_ARMED_AWAY, @@ -75,12 +76,7 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import ( - DOMAIN as HA_DOMAIN, - EVENT_CALL_SERVICE, - HomeAssistant, - State, -) +from 
homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State from homeassistant.util import color, dt as dt_util from homeassistant.util.unit_conversion import TemperatureConverter @@ -190,12 +186,12 @@ async def test_onoff_group(hass: HomeAssistant) -> None: assert trt_off.query_attributes() == {"on": False} - on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON) + on_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": True}, {}) assert len(on_calls) == 1 assert on_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} - off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF) + off_calls = async_mock_service(hass, HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF) await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {"on": False}, {}) assert len(off_calls) == 1 assert off_calls[0].data == {ATTR_ENTITY_ID: "group.bla"} diff --git a/tests/components/google_generative_ai_conversation/conftest.py b/tests/components/google_generative_ai_conversation/conftest.py index 1761516e4f5..28c21a9b791 100644 --- a/tests/components/google_generative_ai_conversation/conftest.py +++ b/tests/components/google_generative_ai_conversation/conftest.py @@ -1,5 +1,6 @@ """Tests helpers.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -14,14 +15,14 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_genai(): +def mock_genai() -> Generator[None]: """Mock the genai call in async_setup_entry.""" with patch("google.ai.generativelanguage_v1beta.ModelServiceAsyncClient.get_model"): yield @pytest.fixture -def mock_config_entry(hass, mock_genai): +def mock_config_entry(hass: HomeAssistant, mock_genai: None) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( domain="google_generative_ai_conversation", @@ -35,7 +36,9 @@ def mock_config_entry(hass, mock_genai): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -44,7 +47,9 @@ def mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass: HomeAssistant, mock_config_entry: ConfigEntry): +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: ConfigEntry +) -> None: """Initialize integration.""" assert await async_setup_component(hass, "google_generative_ai_conversation", {}) await hass.async_block_till_done() diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index abd3658e869..65238c5212a 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -215,7 +215,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-None] +# name: test_default_prompt[config_entry_options0-0-None] list([ tuple( '', @@ -263,7 +263,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -311,7 +311,7 @@ ), ]) # --- -# 
name: test_default_prompt[config_entry_options1-None] +# name: test_default_prompt[config_entry_options1-1-None] list([ tuple( '', @@ -360,7 +360,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation] list([ tuple( '', diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py index c835a4d3b13..d4992c732e1 100644 --- a/tests/components/google_generative_ai_conversation/test_config_flow.py +++ b/tests/components/google_generative_ai_conversation/test_config_flow.py @@ -154,10 +154,10 @@ async def test_form(hass: HomeAssistant) -> None: ), ], ) +@pytest.mark.usefixtures("mock_init_component") async def test_options_switching( hass: HomeAssistant, - mock_config_entry, - mock_init_component, + mock_config_entry: MockConfigEntry, mock_models, current_options, new_options, diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index a7ab2c1b337..a8eae34e08b 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, MagicMock, patch from freezegun import freeze_time from google.ai.generativelanguage_v1beta.types.content import FunctionCall -from google.api_core.exceptions import GoogleAPICallError +from google.api_core.exceptions import GoogleAPIError import google.generativeai.types as genai_types import pytest from syrupy.assertion import SnapshotAssertion @@ -17,8 +17,9 @@ from homeassistant.components.google_generative_ai_conversation.const import ( ) from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, + _format_schema, ) -from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -38,19 +39,23 @@ def freeze_the_time(): "agent_id", [None, "conversation.google_generative_ai_conversation"] ) @pytest.mark.parametrize( - "config_entry_options", + ("config_entry_options", "expected_features"), [ - {}, - {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + ({}, 0), + ( + {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + conversation.ConversationEntityFeature.CONTROL, + ), ], ) +@pytest.mark.usefixtures("mock_init_component") async def test_default_prompt( hass: HomeAssistant, mock_config_entry: MockConfigEntry, - mock_init_component, snapshot: SnapshotAssertion, agent_id: str | None, config_entry_options: {}, + expected_features: conversation.ConversationEntityFeature, hass_ws_client: WebSocketGenerator, ) -> None: """Test that the default prompt works.""" @@ -97,15 +102,18 @@ async def test_default_prompt( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options) + state = hass.states.get("conversation.google_generative_ai_conversation") + assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features + @pytest.mark.parametrize( ("model_name", "supports_system_instruction"), [("models/gemini-1.5-pro", True), 
("models/gemini-1.0-pro", False)], ) +@pytest.mark.usefixtures("mock_init_component") async def test_chat_history( hass: HomeAssistant, mock_config_entry: MockConfigEntry, - mock_init_component, model_name: str, supports_system_instruction: bool, snapshot: SnapshotAssertion, @@ -167,11 +175,11 @@ async def test_chat_history( @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) +@pytest.mark.usefixtures("mock_init_component") async def test_function_call( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, snapshot: SnapshotAssertion, ) -> None: """Test function calling.""" @@ -269,21 +277,22 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] assert "Answer in plain text" in detail_event["data"]["prompt"] + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) +@pytest.mark.usefixtures("mock_init_component") async def test_function_call_without_parameters( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, snapshot: SnapshotAssertion, ) -> None: """Test function calling without parameters.""" @@ -360,11 +369,11 @@ async def test_function_call_without_parameters( @patch( "homeassistant.components.google_generative_ai_conversation.conversation.llm.AssistAPI._async_get_tools" ) +@pytest.mark.usefixtures("mock_init_component") async def test_function_exception( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, - mock_init_component, ) -> None: """Test exception in function calling.""" agent_id = mock_config_entry_with_assist.entry_id @@ -442,14 +451,15 @@ async def test_function_exception( ) +@pytest.mark.usefixtures("mock_init_component") async def test_error_handling( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test that client errors are caught.""" with patch("google.generativeai.GenerativeModel") as mock_model: mock_chat = AsyncMock() mock_model.return_value.start_chat.return_value = mock_chat - mock_chat.send_message_async.side_effect = GoogleAPICallError("some error") + mock_chat.send_message_async.side_effect = GoogleAPIError("some error") result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) @@ -457,12 +467,13 @@ async def test_error_handling( assert result.response.response_type == intent.IntentResponseType.ERROR, result assert result.response.error_code == "unknown", result assert result.response.as_dict()["speech"]["plain"]["speech"] == ( - "Sorry, I had a problem talking to Google Generative AI: None some error" + "Sorry, I had a problem talking to Google Generative AI: some error" ) +@pytest.mark.usefixtures("mock_init_component") async def test_blocked_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test blocked response.""" with 
patch("google.generativeai.GenerativeModel") as mock_model: @@ -482,8 +493,9 @@ async def test_blocked_response( ) +@pytest.mark.usefixtures("mock_init_component") async def test_empty_response( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test empty response.""" with patch("google.generativeai.GenerativeModel") as mock_model: @@ -503,10 +515,9 @@ async def test_empty_response( ) +@pytest.mark.usefixtures("mock_init_component") async def test_invalid_llm_api( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test handling of invalid llm api.""" hass.config_entries.async_update_entry( @@ -595,10 +606,9 @@ async def test_template_variables( assert "The user id is 12345." in mock_model.mock_calls[0][2]["system_instruction"] +@pytest.mark.usefixtures("mock_init_component") async def test_conversation_agent( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, + hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Test GoogleGenerativeAIAgent.""" agent = conversation.get_agent_manager(hass).async_get_agent( @@ -620,3 +630,61 @@ async def test_escape_decode() -> None: "param2": "param2's value", "param3": {"param31": "Cheminée", "param32": "Cheminée"}, } + + +@pytest.mark.parametrize( + ("openapi", "protobuf"), + [ + ( + {"type": "string", "enum": ["a", "b", "c"]}, + {"type_": "STRING", "enum": ["a", "b", "c"]}, + ), + ( + {"type": "integer", "enum": [1, 2, 3]}, + {"type_": "STRING", "enum": ["1", "2", "3"]}, + ), + ({"anyOf": [{"type": "integer"}, {"type": "number"}]}, {"type_": "INTEGER"}), + ( + { + "anyOf": [ + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + ] + }, + {"type_": "INTEGER"}, + ), + ({"type": "string", "format": "lower"}, {"type_": "STRING"}), + ({"type": "boolean", "format": "bool"}, {"type_": "BOOLEAN"}), + ( + {"type": "number", "format": "percent"}, + {"type_": "NUMBER", "format_": "percent"}, + ), + ( + { + "type": "object", + "properties": {"var": {"type": "string"}}, + "required": [], + }, + { + "type_": "OBJECT", + "properties": {"var": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "object", "additionalProperties": True}, + { + "type_": "OBJECT", + "properties": {"json": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "array", "items": {"type": "string"}}, + {"type_": "ARRAY", "items": {"type_": "STRING"}}, + ), + ], +) +async def test_format_schema(openapi, protobuf) -> None: + """Test _format_schema.""" + assert _format_schema(openapi) == protobuf diff --git a/tests/components/google_generative_ai_conversation/test_init.py b/tests/components/google_generative_ai_conversation/test_init.py index eeaa777f614..4875323d094 100644 --- a/tests/components/google_generative_ai_conversation/test_init.py +++ b/tests/components/google_generative_ai_conversation/test_init.py @@ -14,11 +14,9 @@ from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_without_images( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test generate content service.""" stubbed_generated_content = 
( @@ -46,11 +44,9 @@ async def test_generate_content_service_without_images( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test generate content service.""" stubbed_generated_content = ( @@ -134,11 +130,9 @@ async def test_generate_content_response_has_empty_parts( ) +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_allowed_path( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image in a not allowed path.""" with ( @@ -165,11 +159,9 @@ async def test_generate_content_service_with_image_not_allowed_path( ) +@pytest.mark.usefixtures("mock_init_component") async def test_generate_content_service_with_image_not_exists( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, ) -> None: """Test generate content service with an image that does not exist.""" with ( @@ -192,12 +184,8 @@ async def test_generate_content_service_with_image_not_exists( ) -async def test_generate_content_service_with_non_image( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_init_component, - snapshot: SnapshotAssertion, -) -> None: +@pytest.mark.usefixtures("mock_init_component") +async def test_generate_content_service_with_non_image(hass: HomeAssistant) -> None: """Test generate content service with a non image.""" with ( patch("pathlib.Path.exists", return_value=True), @@ -254,5 +242,4 @@ async def test_config_entry_error( assert not await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state == state - mock_config_entry.async_get_active_flows(hass, {"reauth"}) assert any(mock_config_entry.async_get_active_flows(hass, {"reauth"})) == reauth diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index a793ade5312..fba561f6df1 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -1,9 +1,10 @@ """The tests for the Google Pub/Sub component.""" +from collections.abc import Generator from dataclasses import dataclass from datetime import datetime import os -from unittest import mock +from unittest.mock import MagicMock, Mock, patch import pytest @@ -40,30 +41,30 @@ async def test_nested() -> None: @pytest.fixture(autouse=True, name="mock_client") -def mock_client_fixture(): +def mock_client_fixture() -> Generator[MagicMock]: """Mock the pubsub client.""" - with mock.patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: + with patch(f"{GOOGLE_PUBSUB_PATH}.PublisherClient") as client: setattr( client, "from_service_account_json", - mock.MagicMock(return_value=mock.MagicMock()), + MagicMock(return_value=MagicMock()), ) yield client @pytest.fixture(autouse=True, name="mock_is_file") -def mock_is_file_fixture(): +def mock_is_file_fixture() -> Generator[MagicMock]: """Mock os.path.isfile.""" - with mock.patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: + with patch(f"{GOOGLE_PUBSUB_PATH}.os.path.isfile") as is_file: is_file.return_value = True yield is_file @pytest.fixture(autouse=True) -def 
mock_json(hass, monkeypatch): +def mock_json(monkeypatch: pytest.MonkeyPatch) -> None: """Mock the event bus listener and os component.""" monkeypatch.setattr( - f"{GOOGLE_PUBSUB_PATH}.json.dumps", mock.Mock(return_value=mock.MagicMock()) + f"{GOOGLE_PUBSUB_PATH}.json.dumps", Mock(return_value=MagicMock()) ) diff --git a/tests/components/google_sheets/test_config_flow.py b/tests/components/google_sheets/test_config_flow.py index 0da046645d2..a504d8c4280 100644 --- a/tests/components/google_sheets/test_config_flow.py +++ b/tests/components/google_sheets/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Google Sheets config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch from gspread import GSpreadException import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.application_credentials import ( diff --git a/tests/components/google_sheets/test_init.py b/tests/components/google_sheets/test_init.py index 014e89349e2..700783a2e30 100644 --- a/tests/components/google_sheets/test_init.py +++ b/tests/components/google_sheets/test_init.py @@ -214,6 +214,32 @@ async def test_append_sheet( assert len(mock_client.mock_calls) == 8 +async def test_append_sheet_multiple_rows( + hass: HomeAssistant, + setup_integration: ComponentSetup, + config_entry: MockConfigEntry, +) -> None: + """Test service call appending to a sheet.""" + await setup_integration() + + entries = hass.config_entries.async_entries(DOMAIN) + assert len(entries) == 1 + assert entries[0].state is ConfigEntryState.LOADED + + with patch("homeassistant.components.google_sheets.Client") as mock_client: + await hass.services.async_call( + DOMAIN, + "append_sheet", + { + "config_entry": config_entry.entry_id, + "worksheet": "Sheet1", + "data": [{"foo": "bar"}, {"foo": "bar2"}], + }, + blocking=True, + ) + assert len(mock_client.mock_calls) == 8 + + async def test_append_sheet_api_error( hass: HomeAssistant, setup_integration: ComponentSetup, diff --git a/tests/components/google_tasks/test_config_flow.py b/tests/components/google_tasks/test_config_flow.py index f2655afd602..f8ccc5e048f 100644 --- a/tests/components/google_tasks/test_config_flow.py +++ b/tests/components/google_tasks/test_config_flow.py @@ -1,11 +1,11 @@ """Test the Google Tasks config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch from googleapiclient.errors import HttpError from httplib2 import Response import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.google_tasks.const import ( diff --git a/tests/components/google_tasks/test_todo.py b/tests/components/google_tasks/test_todo.py index afbaabe5cd0..b0ee135d4a9 100644 --- a/tests/components/google_tasks/test_todo.py +++ b/tests/components/google_tasks/test_todo.py @@ -10,8 +10,16 @@ from httplib2 import Response import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -376,8 +384,8 @@ async def test_task_items_error_response( ("api_responses", "item_data"), [ 
(CREATE_API_RESPONSES, {}), - (CREATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (CREATE_API_RESPONSES, {"description": "6-pack"}), + (CREATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (CREATE_API_RESPONSES, {ATTR_DESCRIPTION: "6-pack"}), ], ids=["summary", "due", "description"], ) @@ -399,9 +407,9 @@ async def test_create_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -439,9 +447,9 @@ async def test_create_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -464,9 +472,9 @@ async def test_update_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -504,9 +512,9 @@ async def test_update_todo_list_item_error( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "rename": "Soda", "status": "completed"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_RENAME: "Soda", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -514,12 +522,12 @@ async def test_update_todo_list_item_error( @pytest.mark.parametrize( ("api_responses", "item_data"), [ - (UPDATE_API_RESPONSES, {"rename": "Soda"}), - (UPDATE_API_RESPONSES, {"due_date": "2023-11-18"}), - (UPDATE_API_RESPONSES, {"due_date": None}), - (UPDATE_API_RESPONSES, {"description": "At least one gallon"}), - (UPDATE_API_RESPONSES, {"description": ""}), - (UPDATE_API_RESPONSES, {"description": None}), + (UPDATE_API_RESPONSES, {ATTR_RENAME: "Soda"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: "2023-11-18"}), + (UPDATE_API_RESPONSES, {ATTR_DUE_DATE: None}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: "At least one gallon"}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: ""}), + (UPDATE_API_RESPONSES, {ATTR_DESCRIPTION: None}), ], ids=( "rename", @@ -548,9 +556,9 @@ async def test_partial_update( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", **item_data}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", **item_data}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -578,9 +586,9 @@ async def test_partial_update_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "some-task-id", "status": "needs_action"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "some-task-id", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -622,9 +630,9 @@ async def 
test_delete_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) assert len(mock_http_response.call_args_list) == 4 @@ -670,9 +678,9 @@ async def test_delete_partial_failure( with pytest.raises(HomeAssistantError, match="Invalid task ID"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1", "some-task-id-2", "some-task-id-3"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -711,9 +719,9 @@ async def test_delete_invalid_json_response( with pytest.raises(HomeAssistantError, match="unexpected response"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -750,9 +758,9 @@ async def test_delete_server_error( with pytest.raises(HomeAssistantError, match="responded with error"): await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["some-task-id-1"]}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["some-task-id-1"]}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) @@ -942,9 +950,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "Milk"}, - target={"entity_id": "todo.my_tasks"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.my_tasks"}, blocking=True, ) diff --git a/tests/components/google_translate/conftest.py b/tests/components/google_translate/conftest.py index 82f8d50b83c..aa84c201f0e 100644 --- a/tests/components/google_translate/conftest.py +++ b/tests/components/google_translate/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Google Translate text-to-speech tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index d19b1269438..41cecd8cd98 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any @@ -9,21 +10,16 @@ from unittest.mock import MagicMock, patch from gtts import gTTSError import pytest -from typing_extensions import Generator from homeassistant.components import tts from homeassistant.components.google_translate.const import CONF_TLD, DOMAIN -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, -) +from homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.config import async_process_ha_core_config from homeassistant.const import ATTR_ENTITY_ID, CONF_PLATFORM from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import 
async_setup_component -from tests.common import MockConfigEntry, async_mock_service +from tests.common import MockConfigEntry from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @@ -39,12 +35,6 @@ def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: return mock_tts_cache_dir -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Mock media player calls.""" - return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) - - @pytest.fixture(autouse=True) async def setup_internal_url(hass: HomeAssistant) -> None: """Set up internal url.""" @@ -126,7 +116,7 @@ async def test_tts_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -139,9 +129,11 @@ async def test_tts_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -181,7 +173,7 @@ async def test_service_say_german_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -194,9 +186,11 @@ async def test_service_say_german_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -236,7 +230,7 @@ async def test_service_say_german_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -249,9 +243,11 @@ async def test_service_say_german_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -290,7 +286,7 @@ async def test_service_say_en_uk_config( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -303,9 +299,11 @@ async def test_service_say_en_uk_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -345,7 +343,7 @@ async def test_service_say_en_uk_service( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -358,9 +356,11 @@ async def test_service_say_en_uk_service( blocking=True, ) - assert len(calls) == 1 + 
assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -400,7 +400,7 @@ async def test_service_say_en_couk( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -413,9 +413,11 @@ async def test_service_say_en_couk( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) assert len(mock_gtts.mock_calls) == 2 @@ -454,7 +456,7 @@ async def test_service_say_error( hass: HomeAssistant, mock_gtts: MagicMock, hass_client: ClientSessionGenerator, - calls: list[ServiceCall], + service_calls: list[ServiceCall], setup: str, tts_service: str, service_data: dict[str, Any], @@ -469,9 +471,11 @@ async def test_service_say_error( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.NOT_FOUND ) assert len(mock_gtts.mock_calls) == 2 diff --git a/tests/components/google_travel_time/conftest.py b/tests/components/google_travel_time/conftest.py index 141b40eff29..7d1e4791eee 100644 --- a/tests/components/google_travel_time/conftest.py +++ b/tests/components/google_travel_time/conftest.py @@ -1,17 +1,22 @@ """Fixtures for Google Time Travel tests.""" -from unittest.mock import patch +from collections.abc import Generator +from typing import Any +from unittest.mock import MagicMock, patch from googlemaps.exceptions import ApiError, Timeout, TransportError import pytest from homeassistant.components.google_travel_time.const import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture(name="mock_config") -async def mock_config_fixture(hass, data, options): +async def mock_config_fixture( + hass: HomeAssistant, data: dict[str, Any], options: dict[str, Any] +) -> MockConfigEntry: """Mock a Google Travel Time config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -26,7 +31,7 @@ async def mock_config_fixture(hass, data, options): @pytest.fixture(name="bypass_setup") -def bypass_setup_fixture(): +def bypass_setup_fixture() -> Generator[None]: """Bypass entry setup.""" with patch( "homeassistant.components.google_travel_time.async_setup_entry", @@ -36,7 +41,7 @@ def bypass_setup_fixture(): @pytest.fixture(name="bypass_platform_setup") -def bypass_platform_setup_fixture(): +def bypass_platform_setup_fixture() -> Generator[None]: """Bypass platform setup.""" with patch( "homeassistant.components.google_travel_time.sensor.async_setup_entry", @@ -46,7 +51,7 @@ def bypass_platform_setup_fixture(): @pytest.fixture(name="validate_config_entry") -def validate_config_entry_fixture(): +def validate_config_entry_fixture() -> Generator[MagicMock]: """Return valid config entry.""" with ( patch("homeassistant.components.google_travel_time.helpers.Client"), @@ -59,24 +64,24 @@ def validate_config_entry_fixture(): @pytest.fixture(name="invalidate_config_entry") -def 
invalidate_config_entry_fixture(validate_config_entry): +def invalidate_config_entry_fixture(validate_config_entry: MagicMock) -> None: """Return invalid config entry.""" validate_config_entry.side_effect = ApiError("test") @pytest.fixture(name="invalid_api_key") -def invalid_api_key_fixture(validate_config_entry): +def invalid_api_key_fixture(validate_config_entry: MagicMock) -> None: """Throw a REQUEST_DENIED ApiError.""" validate_config_entry.side_effect = ApiError("REQUEST_DENIED", "Invalid API key.") @pytest.fixture(name="timeout") -def timeout_fixture(validate_config_entry): +def timeout_fixture(validate_config_entry: MagicMock) -> None: """Throw a Timeout exception.""" validate_config_entry.side_effect = Timeout() @pytest.fixture(name="transport_error") -def transport_error_fixture(validate_config_entry): +def transport_error_fixture(validate_config_entry: MagicMock) -> None: """Throw a TransportError exception.""" validate_config_entry.side_effect = TransportError("Unknown.") diff --git a/tests/components/google_travel_time/test_config_flow.py b/tests/components/google_travel_time/test_config_flow.py index 270b82272d8..d16d1c1ffc9 100644 --- a/tests/components/google_travel_time/test_config_flow.py +++ b/tests/components/google_travel_time/test_config_flow.py @@ -29,6 +29,8 @@ from homeassistant.data_entry_flow import FlowResultType from .const import MOCK_CONFIG, RECONFIGURE_CONFIG +from tests.common import MockConfigEntry + async def assert_common_reconfigure_steps( hass: HomeAssistant, reconfigure_result: config_entries.ConfigFlowResult @@ -194,7 +196,7 @@ async def test_malformed_api_key(hass: HomeAssistant) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry", "bypass_setup") -async def test_reconfigure(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: """Test reconfigure flow.""" reconfigure_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -223,7 +225,7 @@ async def test_reconfigure(hass: HomeAssistant, mock_config) -> None: ) @pytest.mark.usefixtures("invalidate_config_entry") async def test_reconfigure_invalid_config_entry( - hass: HomeAssistant, mock_config + hass: HomeAssistant, mock_config: MockConfigEntry ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -259,7 +261,9 @@ async def test_reconfigure_invalid_config_entry( ], ) @pytest.mark.usefixtures("invalid_api_key") -async def test_reconfigure_invalid_api_key(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure_invalid_api_key( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -293,7 +297,9 @@ async def test_reconfigure_invalid_api_key(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("transport_error") -async def test_reconfigure_transport_error(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure_transport_error( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -327,7 +333,9 @@ async def test_reconfigure_transport_error(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("timeout") -async def test_reconfigure_timeout(hass: HomeAssistant, mock_config) -> None: +async def test_reconfigure_timeout( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test we get the form.""" result = 
await hass.config_entries.flow.async_init( DOMAIN, @@ -361,7 +369,7 @@ async def test_reconfigure_timeout(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow(hass: HomeAssistant, mock_config) -> None: +async def test_options_flow(hass: HomeAssistant, mock_config: MockConfigEntry) -> None: """Test options flow.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -422,7 +430,9 @@ async def test_options_flow(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_options_flow_departure_time(hass: HomeAssistant, mock_config) -> None: +async def test_options_flow_departure_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test options flow with departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -492,7 +502,9 @@ async def test_options_flow_departure_time(hass: HomeAssistant, mock_config) -> ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_departure_time(hass: HomeAssistant, mock_config) -> None: +async def test_reset_departure_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting departure time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -538,7 +550,9 @@ async def test_reset_departure_time(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_arrival_time(hass: HomeAssistant, mock_config) -> None: +async def test_reset_arrival_time( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting arrival time.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None @@ -582,7 +596,9 @@ async def test_reset_arrival_time(hass: HomeAssistant, mock_config) -> None: ], ) @pytest.mark.usefixtures("validate_config_entry") -async def test_reset_options_flow_fields(hass: HomeAssistant, mock_config) -> None: +async def test_reset_options_flow_fields( + hass: HomeAssistant, mock_config: MockConfigEntry +) -> None: """Test resetting options flow fields that are not time related to None.""" result = await hass.config_entries.options.async_init( mock_config.entry_id, data=None diff --git a/tests/components/google_travel_time/test_sensor.py b/tests/components/google_travel_time/test_sensor.py index 57f3d7a0b98..5ac9ecad482 100644 --- a/tests/components/google_travel_time/test_sensor.py +++ b/tests/components/google_travel_time/test_sensor.py @@ -1,6 +1,7 @@ """Test the Google Maps Travel Time sensors.""" -from unittest.mock import patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest @@ -25,7 +26,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="mock_update") -def mock_update_fixture(): +def mock_update_fixture() -> Generator[MagicMock]: """Mock an update to the sensor.""" with ( patch("homeassistant.components.google_travel_time.sensor.Client"), @@ -56,7 +57,7 @@ def mock_update_fixture(): @pytest.fixture(name="mock_update_duration") -def mock_update_duration_fixture(mock_update): +def mock_update_duration_fixture(mock_update: MagicMock) -> MagicMock: """Mock an update to the sensor returning no duration_in_traffic.""" mock_update.return_value = { "rows": [ @@ -77,7 +78,7 @@ def mock_update_duration_fixture(mock_update): @pytest.fixture(name="mock_update_empty") -def 
mock_update_empty_fixture(mock_update): +def mock_update_empty_fixture(mock_update: MagicMock) -> MagicMock: """Mock an update to the sensor with an empty response.""" mock_update.return_value = None return mock_update diff --git a/tests/components/govee_ble/__init__.py b/tests/components/govee_ble/__init__.py index 60930d1dd0e..66c5b0b832c 100644 --- a/tests/components/govee_ble/__init__.py +++ b/tests/components/govee_ble/__init__.py @@ -83,3 +83,136 @@ GVH5106_SERVICE_INFO = BluetoothServiceInfo( service_data={}, source="local", ) + + +GV5125_BUTTON_0_SERVICE_INFO = BluetoothServiceInfo( + name="GV51255367", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", + 61320: b".\xaf\x00\x00b\\\xae\x92\x15\xb6\xa8\n\xd4\x81K\xcaK_s\xd9E40\x02", + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + +GV5125_BUTTON_1_SERVICE_INFO = BluetoothServiceInfo( + name="GV51255367", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 60552: b"\x01\n.\xaf\xd9085Sg\x01\x01", + 61320: b".\xaf\x00\x00\xfb\x0e\xc9h\xd7\x05l\xaf*\xf3\x1b\xe8w\xf1\xe1\xe8\xe3\xa7\xf8\xc6", + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5121_MOTION_SERVICE_INFO = BluetoothServiceInfo( + name="GV5121195A", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"Y\x94\x00\x00\xf0\xb9\x197\xaeP\xb67,\x86j\xc2\xf3\xd0a\xe7\x17\xc0,\xef" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5121_MOTION_SERVICE_INFO_2 = BluetoothServiceInfo( + name="GV5121195A", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"Y\x94\x00\x06\xa3f6e\xc8\xe6\xfdv\x04\xaf\xe7k\xbf\xab\xeb\xbf\xb3\xa3\xd5\x19" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5123_OPEN_SERVICE_INFO = BluetoothServiceInfo( + name="GV51230B3D", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"=\xec\x00\x00\xdeCw\xd5^U\xf9\x91In6\xbd\xc6\x7f\x8b,'\x06t\x97" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GV5123_CLOSED_SERVICE_INFO = BluetoothServiceInfo( + name="GV51230B3D", + address="C1:37:37:32:0F:45", + rssi=-36, + manufacturer_data={ + 61320: b"=\xec\x00\x01Y\xdbk\xd9\xbe\xd7\xaf\xf7*&\xaaK\xd7-\xfa\x94W>[\xe9" + }, + service_data={}, + service_uuids=[], + source="24:4C:AB:03:E6:B8", +) + + +GVH5124_SERVICE_INFO = BluetoothServiceInfo( + name="GV51242F68", + address="D3:32:39:37:2F:68", + rssi=-67, + manufacturer_data={ + 61320: b"\x08\xa2\x00\x01%\xc2YW\xfdzu\x0e\xf24\xa2\x18\xbb\x15F|[s{\x04" + }, + service_data={}, + service_uuids=[], + source="local", +) + +GVH5124_2_SERVICE_INFO = BluetoothServiceInfo( + name="GV51242F68", + address="D3:32:39:37:2F:68", + rssi=-67, + manufacturer_data={ + 61320: b"\x08\xa2\x00\x13^Sso\xaeC\x9aU\xcf\xd8\x02\x1b\xdf\xd5\xded;+\xd6\x13" + }, + service_data={}, + service_uuids=[], + source="local", +) + + +GVH5127_MOTION_SERVICE_INFO = BluetoothServiceInfo( + name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-61, + manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x11"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) +GVH5127_PRESENT_SERVICE_INFO = BluetoothServiceInfo( + name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-60, + manufacturer_data={34819: b"\xec\x00\x01\x01\x01\x01"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) +GVH5127_ABSENT_SERVICE_INFO = BluetoothServiceInfo( + 
name="GVH51275E3F", + address="D0:C9:07:1B:5E:3F", + rssi=-53, + manufacturer_data={34819: b"\xec\x00\x01\x01\x00\x00"}, + service_data={}, + service_uuids=[], + source="Core Bluetooth", +) diff --git a/tests/components/govee_ble/test_binary_sensor.py b/tests/components/govee_ble/test_binary_sensor.py new file mode 100644 index 00000000000..cf8b54ef54f --- /dev/null +++ b/tests/components/govee_ble/test_binary_sensor.py @@ -0,0 +1,84 @@ +"""Test the Govee BLE binary_sensor.""" + +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN +from homeassistant.const import STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant + +from . import ( + GV5123_CLOSED_SERVICE_INFO, + GV5123_OPEN_SERVICE_INFO, + GVH5127_ABSENT_SERVICE_INFO, + GVH5127_MOTION_SERVICE_INFO, + GVH5127_PRESENT_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_window_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the window sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5123_OPEN_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5123"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + inject_bluetooth_service_info(hass, GV5123_OPEN_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("binary_sensor.51230f45_window") + assert motion_sensor.state == STATE_ON + + inject_bluetooth_service_info(hass, GV5123_CLOSED_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.51230f45_window") + assert motion_sensor.state == STATE_OFF + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_presence_sensor(hass: HomeAssistant) -> None: + """Test the presence sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GVH5127_ABSENT_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5127"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + inject_bluetooth_service_info(hass, GVH5127_ABSENT_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_OFF + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_OFF + + inject_bluetooth_service_info(hass, GVH5127_PRESENT_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_OFF + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_ON + + inject_bluetooth_service_info(hass, GVH5127_MOTION_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("binary_sensor.h51275e3f_motion") + assert motion_sensor.state == STATE_ON + occupancy_sensor = hass.states.get("binary_sensor.h51275e3f_occupancy") + assert occupancy_sensor.state == STATE_ON diff --git a/tests/components/govee_ble/test_config_flow.py b/tests/components/govee_ble/test_config_flow.py index 0c340c01f2a..eb0719f832c 100644 --- 
a/tests/components/govee_ble/test_config_flow.py +++ b/tests/components/govee_ble/test_config_flow.py @@ -3,7 +3,7 @@ from unittest.mock import patch from homeassistant import config_entries -from homeassistant.components.govee_ble.const import DOMAIN +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -29,7 +29,7 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5075 2762" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5075"} assert result2["result"].unique_id == "61DE521B-F0BF-9F44-64D4-75BBE1738105" @@ -75,7 +75,7 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" @@ -198,7 +198,7 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "H5177 2EC8" - assert result2["data"] == {} + assert result2["data"] == {CONF_DEVICE_TYPE: "H5177"} assert result2["result"].unique_id == "4125DDBA-2774-4851-9889-6AADDD4CAC3D" # Verify the original one was aborted diff --git a/tests/components/govee_ble/test_event.py b/tests/components/govee_ble/test_event.py new file mode 100644 index 00000000000..c41cdad3c89 --- /dev/null +++ b/tests/components/govee_ble/test_event.py @@ -0,0 +1,108 @@ +"""Test the Govee BLE events.""" + +from homeassistant.components.govee_ble.const import CONF_DEVICE_TYPE, DOMAIN +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import HomeAssistant + +from . 
import ( + GV5121_MOTION_SERVICE_INFO, + GV5121_MOTION_SERVICE_INFO_2, + GV5125_BUTTON_0_SERVICE_INFO, + GV5125_BUTTON_1_SERVICE_INFO, + GVH5124_2_SERVICE_INFO, + GVH5124_SERVICE_INFO, +) + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_motion_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the motion sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5121_MOTION_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5121"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("event.h5121_motion") + first_time = motion_sensor.state + assert motion_sensor.state != STATE_UNKNOWN + + inject_bluetooth_service_info(hass, GV5121_MOTION_SERVICE_INFO_2) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("event.h5121_motion") + assert motion_sensor.state != first_time + assert motion_sensor.state != STATE_UNKNOWN + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_button(hass: HomeAssistant) -> None: + """Test setting up creates the buttons.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GV5125_BUTTON_1_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5125"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 6 + inject_bluetooth_service_info(hass, GV5125_BUTTON_1_SERVICE_INFO) + await hass.async_block_till_done() + + button_1 = hass.states.get("event.h5125_button_1") + assert button_1.state == STATE_UNKNOWN + + inject_bluetooth_service_info(hass, GV5125_BUTTON_0_SERVICE_INFO) + await hass.async_block_till_done() + button_1 = hass.states.get("event.h5125_button_1") + assert button_1.state != STATE_UNKNOWN + assert len(hass.states.async_all()) == 7 + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +async def test_vibration_sensor(hass: HomeAssistant) -> None: + """Test setting up creates the vibration sensor.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=GVH5124_SERVICE_INFO.address, + data={CONF_DEVICE_TYPE: "H5124"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 1 + inject_bluetooth_service_info(hass, GVH5124_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 2 + + motion_sensor = hass.states.get("event.h5124_vibration") + first_time = motion_sensor.state + assert motion_sensor.state != STATE_UNKNOWN + + inject_bluetooth_service_info(hass, GVH5124_2_SERVICE_INFO) + await hass.async_block_till_done() + + motion_sensor = hass.states.get("event.h5124_vibration") + assert motion_sensor.state != first_time + assert motion_sensor.state != STATE_UNKNOWN + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/govee_light_local/conftest.py b/tests/components/govee_light_local/conftest.py index 90a9f8e6827..6a8ee99b764 100644 --- 
a/tests/components/govee_light_local/conftest.py +++ b/tests/components/govee_light_local/conftest.py @@ -1,11 +1,11 @@ """Tests configuration for Govee Local API.""" from asyncio import Event +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from govee_local_api import GoveeLightCapability import pytest -from typing_extensions import Generator from homeassistant.components.govee_light_local.coordinator import GoveeController diff --git a/tests/components/gpsd/conftest.py b/tests/components/gpsd/conftest.py index c323365e8fd..c15ef7f0258 100644 --- a/tests/components/gpsd/conftest.py +++ b/tests/components/gpsd/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the GPSD tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/gpsd/test_config_flow.py b/tests/components/gpsd/test_config_flow.py index 6f330571076..2d68a704119 100644 --- a/tests/components/gpsd/test_config_flow.py +++ b/tests/components/gpsd/test_config_flow.py @@ -43,10 +43,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: async def test_connection_error(hass: HomeAssistant) -> None: """Test connection to host error.""" - with patch("socket.socket") as mock_socket: - mock_connect = mock_socket.return_value.connect - mock_connect.side_effect = OSError - + with patch("socket.socket", side_effect=OSError): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/gpslogger/test_init.py b/tests/components/gpslogger/test_init.py index 68b95df1702..fab6aaa4e84 100644 --- a/tests/components/gpslogger/test_init.py +++ b/tests/components/gpslogger/test_init.py @@ -45,7 +45,7 @@ async def gpslogger_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, @@ -63,7 +63,7 @@ async def setup_zones(hass): @pytest.fixture -async def webhook_id(hass, gpslogger_client): +async def webhook_id(hass: HomeAssistant, gpslogger_client: TestClient) -> str: """Initialize the GPSLogger component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -81,7 +81,9 @@ async def webhook_id(hass, gpslogger_client): return result["result"].data["webhook_id"] -async def test_missing_data(hass: HomeAssistant, gpslogger_client, webhook_id) -> None: +async def test_missing_data( + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str +) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -111,8 +113,8 @@ async def test_enter_and_exit( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - gpslogger_client, - webhook_id, + gpslogger_client: TestClient, + webhook_id: str, ) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -148,7 +150,7 @@ async def test_enter_and_exit( async def test_enter_with_attrs( - hass: HomeAssistant, gpslogger_client, webhook_id + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str ) -> None: """Test when additional attributes are present.""" url = f"/api/webhook/{webhook_id}" @@ -210,7 +212,7 @@ async def test_enter_with_attrs( reason="The device_tracker component does not support unloading yet." 
) async def test_load_unload_entry( - hass: HomeAssistant, gpslogger_client, webhook_id + hass: HomeAssistant, gpslogger_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/gree/conftest.py b/tests/components/gree/conftest.py index 88bcaea33c2..a9e2fc9e5d4 100644 --- a/tests/components/gree/conftest.py +++ b/tests/components/gree/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from .common import FakeDiscovery, build_device_mock diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 37b0b0dc15e..32372bebf37 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,8 +5,12 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN -from homeassistant.components.gree.const import COORDINATORS, DOMAIN as GREE +from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.gree.const import ( + COORDINATORS, + DOMAIN as GREE, + UPDATE_INTERVAL, +) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -69,3 +73,30 @@ async def test_discovery_after_setup( device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" assert device_infos[1].ip == "2.2.2.1" + + +async def test_coordinator_updates( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test gree devices update their state.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all(DOMAIN)) == 1 + + callback = device().add_handler.call_args_list[0][0][1] + + async def fake_update_state(*args) -> None: + """Fake update state.""" + device().power = True + callback() + + device().update_state.side_effect = fake_update_state + + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_1) + assert state is not None + assert state.state != HVACMode.OFF diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index 0bd767e4f35..1bf49bbca26 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -4,13 +4,19 @@ from datetime import timedelta from unittest.mock import DEFAULT as DEFAULT_MOCK, AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from greeclimate.device import HorizontalSwing, VerticalSwing +from greeclimate.device import ( + TEMP_MAX, + TEMP_MAX_F, + TEMP_MIN, + TEMP_MIN_F, + HorizontalSwing, + VerticalSwing, +) from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( - ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -40,11 +46,18 @@ from homeassistant.components.gree.climate import ( FAN_MODES_REVERSE, HVAC_MODES, HVAC_MODES_REVERSE, + GreeClimateEntity, +) +from homeassistant.components.gree.const import ( + DISCOVERY_SCAN_INTERVAL, + FAN_MEDIUM_HIGH, + FAN_MEDIUM_LOW, + UPDATE_INTERVAL, ) -from homeassistant.components.gree.const import 
FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, + ATTR_UNIT_OF_MEASUREMENT, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_UNAVAILABLE, @@ -53,7 +66,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import async_setup_gree, build_device_mock @@ -62,12 +74,6 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{DOMAIN}.fake_device_1" -@pytest.fixture -def mock_now(): - """Fixture for dtutil.now.""" - return dt_util.utcnow() - - async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: """Test discovery is only ever called once.""" await async_setup_gree(hass) @@ -96,7 +102,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: async def test_discovery_setup_connection_error( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test gree integration is setup.""" MockDevice1 = build_device_mock( @@ -118,7 +124,7 @@ async def test_discovery_setup_connection_error( async def test_discovery_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices don't change after multiple discoveries.""" MockDevice1 = build_device_mock( @@ -134,8 +140,7 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 assert len(hass.states.async_all(DOMAIN)) == 2 @@ -144,9 +149,8 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -154,7 +158,7 @@ async def test_discovery_after_setup( async def test_discovery_add_device_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after initial setup.""" MockDevice1 = build_device_mock( @@ -170,6 +174,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.side_effect = [MockDevice1] + await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) await hass.async_block_till_done() @@ -180,9 +186,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice2] device.side_effect = [MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -190,7 +195,7 @@ async def test_discovery_add_device_after_setup( async def 
test_discovery_device_bind_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after a late device bind.""" MockDevice1 = build_device_mock( @@ -202,8 +207,7 @@ async def test_discovery_device_bind_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.return_value = MockDevice1 - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) @@ -214,9 +218,8 @@ async def test_discovery_device_bind_after_setup( MockDevice1.bind.side_effect = None MockDevice1.update_state.side_effect = None - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -224,7 +227,7 @@ async def test_discovery_device_bind_after_setup( async def test_update_connection_failure( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure exception.""" device().update_state.side_effect = [ @@ -233,36 +236,32 @@ async def test_update_connection_failure( DeviceTimeoutError, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - # First update to make the device available + # Update 2 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + # Update 3 + await run_update() - next_update = mock_now + timedelta(minutes=15) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # Then two more update failures to make the device unavailable + # Update 4 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE -async def test_update_connection_failure_recovery( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now +async def test_update_connection_send_failure_recovery( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure recovery.""" device().update_state.side_effect = [ @@ -271,31 +270,27 @@ async def test_update_connection_failure_recovery( DEFAULT_MOCK, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - # First update becomes unavailable - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + await run_update() # Update 2 state = hass.states.get(ENTITY_ID) assert 
state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE - # Second update restores the connection - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - + await run_update() # Update 3 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE async def test_update_unhandled_exception( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection unhandled response exception.""" device().update_state.side_effect = [DEFAULT_MOCK, Exception] @@ -306,9 +301,8 @@ async def test_update_unhandled_exception( assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -317,15 +311,13 @@ async def test_update_unhandled_exception( async def test_send_command_device_timeout( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test for sending power on command to the device with a device timeout.""" await async_setup_gree(hass) - # First update to make the device available - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -347,7 +339,40 @@ async def test_send_command_device_timeout( assert state.state != STATE_UNAVAILABLE -async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) -> None: +async def test_unresponsive_device( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test for unresponsive device.""" + await async_setup_gree(hass) + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Update 2 + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + # Update 3, 4, 5 + await run_update() + await run_update() + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state == STATE_UNAVAILABLE + + # Receiving update from device will reset the state to available again + device().device_state_updated("test") + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + +async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: """Test for sending power on command to the device.""" await async_setup_gree(hass) @@ -364,7 +389,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) - async def test_send_power_off_device_timeout( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test for sending power off command to the device with a device timeout.""" 
device().push_state_update.side_effect = DeviceTimeoutError @@ -385,7 +410,7 @@ async def test_send_power_off_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 74)], + [(UnitOfTemperature.CELSIUS, 26), (UnitOfTemperature.FAHRENHEIT, 73)], ) async def test_send_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -405,6 +430,14 @@ async def test_send_target_temperature( # Make sure we're trying to test something that isn't the default assert fake_device.current_temperature != temperature + hass.states.async_set( + ENTITY_ID, + "off", + { + ATTR_UNIT_OF_MEASUREMENT: units, + }, + ) + await hass.services.async_call( DOMAIN, SERVICE_SET_TEMPERATURE, @@ -415,10 +448,6 @@ async def test_send_target_temperature( state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature - assert ( - state.attributes.get(ATTR_CURRENT_TEMPERATURE) - == fake_device.current_temperature - ) assert state.state == HVAC_MODES.get(fake_device.mode) # Reset config temperature_unit back to CELSIUS, required for @@ -462,7 +491,11 @@ async def test_send_target_temperature_with_hvac_mode( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], + [ + (UnitOfTemperature.CELSIUS, 25), + (UnitOfTemperature.FAHRENHEIT, 73), + (UnitOfTemperature.FAHRENHEIT, 74), + ], ) async def test_send_target_temperature_device_timeout( hass: HomeAssistant, discovery, device, units, temperature @@ -492,7 +525,11 @@ async def test_send_target_temperature_device_timeout( @pytest.mark.parametrize( ("units", "temperature"), - [(UnitOfTemperature.CELSIUS, 25), (UnitOfTemperature.FAHRENHEIT, 74)], + [ + (UnitOfTemperature.CELSIUS, 25), + (UnitOfTemperature.FAHRENHEIT, 73), + (UnitOfTemperature.FAHRENHEIT, 74), + ], ) async def test_update_target_temperature( hass: HomeAssistant, discovery, device, units, temperature @@ -505,6 +542,13 @@ async def test_update_target_temperature( await async_setup_gree(hass) + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, + blocking=True, + ) + state = hass.states.get(ENTITY_ID) assert state is not None assert state.attributes.get(ATTR_TEMPERATURE) == temperature @@ -516,9 +560,7 @@ async def test_update_target_temperature( @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) -async def test_send_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset -) -> None: +async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -534,9 +576,7 @@ async def test_send_preset_mode( assert state.attributes.get(ATTR_PRESET_MODE) == preset -async def test_send_invalid_preset_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -557,7 +597,7 @@ async def test_send_invalid_preset_mode( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_send_preset_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test 
for sending preset mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -580,7 +620,7 @@ async def test_send_preset_mode_device_timeout( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_update_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test for updating preset mode from the device.""" device().steady_heat = preset == PRESET_AWAY @@ -607,7 +647,7 @@ async def test_update_preset_mode( ], ) async def test_send_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device.""" await async_setup_gree(hass) @@ -629,7 +669,7 @@ async def test_send_hvac_mode( [HVACMode.AUTO, HVACMode.COOL, HVACMode.DRY, HVACMode.FAN_ONLY, HVACMode.HEAT], ) async def test_send_hvac_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -660,7 +700,7 @@ async def test_send_hvac_mode_device_timeout( ], ) async def test_update_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for updating hvac mode from the device.""" device().power = hvac_mode != HVACMode.OFF @@ -677,9 +717,7 @@ async def test_update_hvac_mode( "fan_mode", [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) -async def test_send_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode -) -> None: +async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -695,9 +733,7 @@ async def test_send_fan_mode( assert state.attributes.get(ATTR_FAN_MODE) == fan_mode -async def test_send_invalid_fan_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -719,7 +755,7 @@ async def test_send_invalid_fan_mode( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_send_fan_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for sending fan mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -743,7 +779,7 @@ async def test_send_fan_mode_device_timeout( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_update_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for updating fan mode from the device.""" device().fan_speed = FAN_MODES_REVERSE.get(fan_mode) @@ -759,7 +795,7 @@ async def test_update_fan_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode 
command to the device.""" await async_setup_gree(hass) @@ -776,9 +812,7 @@ async def test_send_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_send_invalid_swing_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) @@ -799,7 +833,7 @@ async def test_send_invalid_swing_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -822,7 +856,7 @@ async def test_send_swing_mode_device_timeout( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_update_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for updating swing mode from the device.""" device().horizontal_swing = ( @@ -843,6 +877,40 @@ async def test_update_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode +async def test_coordinator_update_handler( + hass: HomeAssistant, discovery, device +) -> None: + """Test for coordinator update handler.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + entity: GreeClimateEntity = hass.data[DOMAIN].get_entity(ENTITY_ID) + assert entity is not None + + # Initial state + assert entity.temperature_unit == UnitOfTemperature.CELSIUS + assert entity.min_temp == TEMP_MIN + assert entity.max_temp == TEMP_MAX + + # Set unit to FAHRENHEIT + device().temperature_units = 1 + entity.coordinator.async_set_updated_data(UnitOfTemperature.FAHRENHEIT) + await hass.async_block_till_done() + + assert entity.temperature_unit == UnitOfTemperature.FAHRENHEIT + assert entity.min_temp == TEMP_MIN_F + assert entity.max_temp == TEMP_MAX_F + + # Set unit back to CELSIUS + device().temperature_units = 0 + entity.coordinator.async_set_updated_data(UnitOfTemperature.CELSIUS) + await hass.async_block_till_done() + + assert entity.temperature_unit == UnitOfTemperature.CELSIUS + assert entity.min_temp == TEMP_MIN + assert entity.max_temp == TEMP_MAX + + @patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion diff --git a/tests/components/greeneye_monitor/conftest.py b/tests/components/greeneye_monitor/conftest.py index ad8a98ce3fe..343a15346e7 100644 --- a/tests/components/greeneye_monitor/conftest.py +++ b/tests/components/greeneye_monitor/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for testing greeneye_monitor.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.greeneye_monitor import DOMAIN from homeassistant.components.sensor import SensorDeviceClass diff --git a/tests/components/group/test_button.py b/tests/components/group/test_button.py new file mode 100644 index 00000000000..c3f4a720d53 --- /dev/null +++ b/tests/components/group/test_button.py @@ -0,0 +1,122 @@ +"""The tests for the group button platform.""" + +from 
freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.group import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util + + +async def test_default_state( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test button group default state.""" + hass.states.async_set("button.notify_light", "2021-01-01T23:59:59.123+00:00") + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": DOMAIN, + "entities": ["button.notify_light", "button.self_destruct"], + "name": "Button group", + "unique_id": "unique_identifier", + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + state = hass.states.get("button.button_group") + assert state is not None + assert state.state == STATE_UNKNOWN + assert state.attributes.get(ATTR_ENTITY_ID) == [ + "button.notify_light", + "button.self_destruct", + ] + + entry = entity_registry.async_get("button.button_group") + assert entry + assert entry.unique_id == "unique_identifier" + + +async def test_state_reporting(hass: HomeAssistant) -> None: + """Test the state reporting. + + The group state is unavailable if all group members are unavailable. + Otherwise, the group state represents the last time the grouped button was pressed. + """ + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: { + "platform": DOMAIN, + "entities": ["button.test1", "button.test2"], + } + }, + ) + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + # Initial state with no group member in the state machine -> unavailable + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members unavailable -> unavailable + hass.states.async_set("button.test1", STATE_UNAVAILABLE) + hass.states.async_set("button.test2", STATE_UNAVAILABLE) + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNAVAILABLE + + # All group members available, but no group member pressed -> unknown + hass.states.async_set("button.test1", "2021-01-01T23:59:59.123+00:00") + hass.states.async_set("button.test2", "2022-02-02T23:59:59.123+00:00") + await hass.async_block_till_done() + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + + +@pytest.mark.usefixtures("enable_custom_integrations") +async def test_service_calls( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test service calls.""" + await async_setup_component( + hass, + BUTTON_DOMAIN, + { + BUTTON_DOMAIN: [ + {"platform": "demo"}, + { + "platform": DOMAIN, + "entities": [ + "button.push", + "button.self_destruct", + ], + }, + ] + }, + ) + await hass.async_block_till_done() + + assert hass.states.get("button.button_group").state == STATE_UNKNOWN + assert hass.states.get("button.push").state == STATE_UNKNOWN + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + freezer.move_to(now) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.button_group"}, + blocking=True, + ) + + assert hass.states.get("button.button_group").state == now.isoformat() + assert 
hass.states.get("button.push").state == now.isoformat() diff --git a/tests/components/group/test_config_flow.py b/tests/components/group/test_config_flow.py index c6ee4ae5a87..461df19ebf8 100644 --- a/tests/components/group/test_config_flow.py +++ b/tests/components/group/test_config_flow.py @@ -29,6 +29,7 @@ from tests.typing import WebSocketGenerator [ ("binary_sensor", "on", "on", {}, {}, {"all": False}, {}), ("binary_sensor", "on", "on", {}, {"all": True}, {"all": True}, {}), + ("button", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("cover", "open", "open", {}, {}, {}, {}), ( "event", @@ -45,6 +46,7 @@ from tests.typing import WebSocketGenerator ("fan", "on", "on", {}, {}, {}, {}), ("light", "on", "on", {}, {}, {}, {}), ("lock", "locked", "locked", {}, {}, {}, {}), + ("notify", STATE_UNKNOWN, "2021-01-01T23:59:59.123+00:00", {}, {}, {}, {}), ("media_player", "on", "on", {}, {}, {}, {}), ( "sensor", @@ -135,11 +137,13 @@ async def test_config_flow( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), + ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -212,11 +216,13 @@ def get_suggested(schema, key): ("group_type", "member_state", "extra_options", "options_options"), [ ("binary_sensor", "on", {"all": False}, {}), + ("button", "2021-01-01T23:59:59.123+00:00", {}, {}), ("cover", "open", {}, {}), ("event", "2021-01-01T23:59:59.123+00:00", {}, {}), ("fan", "on", {}, {}), ("light", "on", {"all": False}, {}), ("lock", "locked", {}, {}), + ("notify", "2021-01-01T23:59:59.123+00:00", {}, {}), ("media_player", "on", {}, {}), ( "sensor", @@ -396,11 +402,13 @@ async def test_all_options( ("group_type", "extra_input"), [ ("binary_sensor", {"all": False}), + ("button", {}), ("cover", {}), ("event", {}), ("fan", {}), ("light", {}), ("lock", {}), + ("notify", {}), ("media_player", {}), ("switch", {}), ], @@ -483,6 +491,7 @@ LIGHT_ATTRS = [ {"color_mode": "unknown"}, ] LOCK_ATTRS = [{"supported_features": 1}, {}] +NOTIFY_ATTRS = [{"supported_features": 0}, {}] MEDIA_PLAYER_ATTRS = [{"supported_features": 0}, {}] SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two"}] @@ -491,11 +500,13 @@ SENSOR_ATTRS = [{"icon": "mdi:calculator"}, {"max_entity_id": "sensor.input_two" ("domain", "extra_user_input", "input_states", "group_state", "extra_attributes"), [ ("binary_sensor", {"all": True}, ["on", "off"], "off", [{}, {}]), + ("button", {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), + ("notify", {}, ["", ""], "unknown", NOTIFY_ATTRS), ("media_player", {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ("sensor", {"type": "max"}, ["10", "20"], "20.0", SENSOR_ATTRS), ("switch", {}, ["on", "off"], "on", [{}, {}]), @@ -600,11 +611,13 @@ async def test_config_flow_preview( ), [ ("binary_sensor", {"all": True}, {"all": False}, ["on", "off"], "on", [{}, {}]), + ("button", {}, {}, ["", ""], "unknown", [{}, {}]), ("cover", {}, {}, ["open", "closed"], "open", COVER_ATTRS), ("event", {}, {}, ["", ""], "unknown", EVENT_ATTRS), ("fan", {}, {}, ["on", "off"], "on", FAN_ATTRS), ("light", {}, {}, ["on", "off"], "on", LIGHT_ATTRS), ("lock", {}, {}, ["unlocked", "locked"], "unlocked", LOCK_ATTRS), + ("notify", {}, {}, ["", 
""], "unknown", NOTIFY_ATTRS), ("media_player", {}, {}, ["on", "off"], "on", MEDIA_PLAYER_ATTRS), ( "sensor", diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index 5b5d8fa873c..c687ca21e2d 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -2,6 +2,7 @@ import asyncio from datetime import timedelta +from typing import Any import pytest @@ -90,7 +91,9 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def setup_comp(hass, config_count): +async def setup_comp( + hass: HomeAssistant, config_count: tuple[dict[str, Any], int] +) -> None: """Set up group cover component.""" config, count = config_count with assert_setup_component(count, DOMAIN): @@ -101,7 +104,8 @@ async def setup_comp(hass, config_count): @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_state(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_state(hass: HomeAssistant) -> None: """Test handling of state. The group state is unknown if all group members are unknown or unavailable. @@ -250,8 +254,9 @@ async def test_state(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) +@pytest.mark.usefixtures("setup_comp") async def test_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test handling of state attributes.""" state = hass.states.get(COVER_GROUP) @@ -416,9 +421,8 @@ async def test_attributes( @pytest.mark.parametrize("config_count", [(CONFIG_TILT_ONLY, 2)]) -async def test_cover_that_only_supports_tilt_removed( - hass: HomeAssistant, setup_comp -) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> None: """Test removing a cover that support tilt.""" hass.states.async_set( DEMO_COVER_TILT, @@ -446,7 +450,8 @@ async def test_cover_that_only_supports_tilt_removed( @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_open_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_open_covers(hass: HomeAssistant) -> None: """Test open cover function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -467,7 +472,8 @@ async def test_open_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_close_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_close_covers(hass: HomeAssistant) -> None: """Test close cover function.""" await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -488,7 +494,8 @@ async def test_close_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_toggle_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_toggle_covers(hass: HomeAssistant) -> None: """Test toggle cover function.""" # Start covers in open state await hass.services.async_call( @@ -538,7 +545,8 @@ async def test_toggle_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_stop_covers(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") 
+async def test_stop_covers(hass: HomeAssistant) -> None: """Test stop cover function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -564,7 +572,8 @@ async def test_stop_covers(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_set_cover_position(hass: HomeAssistant) -> None: """Test set cover position function.""" await hass.services.async_call( DOMAIN, @@ -587,7 +596,8 @@ async def test_set_cover_position(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_open_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_open_tilts(hass: HomeAssistant) -> None: """Test open tilt function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -607,7 +617,8 @@ async def test_open_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_close_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_close_tilts(hass: HomeAssistant) -> None: """Test close tilt function.""" await hass.services.async_call( DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -625,7 +636,8 @@ async def test_close_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_toggle_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_toggle_tilts(hass: HomeAssistant) -> None: """Test toggle tilt function.""" # Start tilted open await hass.services.async_call( @@ -678,7 +690,8 @@ async def test_toggle_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_stop_tilts(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_stop_tilts(hass: HomeAssistant) -> None: """Test stop tilts function.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True @@ -702,7 +715,8 @@ async def test_stop_tilts(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)]) -async def test_set_tilt_positions(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_set_tilt_positions(hass: HomeAssistant) -> None: """Test set tilt position function.""" await hass.services.async_call( DOMAIN, @@ -723,7 +737,8 @@ async def test_set_tilt_positions(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)]) -async def test_is_opening_closing(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_is_opening_closing(hass: HomeAssistant) -> None: """Test is_opening property.""" await hass.services.async_call( DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True diff --git a/tests/components/group/test_fan.py b/tests/components/group/test_fan.py index 6aa6fc2933d..184693f7618 100644 --- a/tests/components/group/test_fan.py +++ b/tests/components/group/test_fan.py @@ -1,6 +1,7 @@ """The tests for the group fan platform.""" import asyncio 
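The cover-group hunks above (and the fan-group and guardian config-flow hunks that follow) replace unused fixture parameters such as `setup_comp` with `pytest.mark.usefixtures`, so the fixture still runs but no longer clutters the test signature. A generic sketch of the pattern, with a hypothetical `seeded_cover` fixture, might be:

import pytest

from homeassistant.core import HomeAssistant


@pytest.fixture
async def seeded_cover(hass: HomeAssistant) -> None:
    """Hypothetical fixture that only prepares state and returns nothing useful."""
    hass.states.async_set("cover.demo_cover", "open")
    await hass.async_block_till_done()


@pytest.mark.usefixtures("seeded_cover")
async def test_cover_seeded(hass: HomeAssistant) -> None:
    """The fixture ran even though the test never references it by name."""
    assert hass.states.get("cover.demo_cover").state == "open"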
+from typing import Any from unittest.mock import patch import pytest @@ -102,7 +103,9 @@ CONFIG_ATTRIBUTES = { @pytest.fixture -async def setup_comp(hass, config_count): +async def setup_comp( + hass: HomeAssistant, config_count: tuple[dict[str, Any], int] +) -> None: """Set up group fan component.""" config, count = config_count with assert_setup_component(count, DOMAIN): @@ -113,9 +116,8 @@ async def setup_comp(hass, config_count): @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_state( - hass: HomeAssistant, entity_registry: er.EntityRegistry, setup_comp -) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_state(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test handling of state. The group state is on if at least one group member is on. @@ -210,7 +212,8 @@ async def test_state( @pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)]) -async def test_attributes(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_attributes(hass: HomeAssistant) -> None: """Test handling of state attributes.""" state = hass.states.get(FAN_GROUP) assert state.state == STATE_UNAVAILABLE @@ -267,7 +270,8 @@ async def test_attributes(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -async def test_direction_oscillating(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_direction_oscillating(hass: HomeAssistant) -> None: """Test handling of direction and oscillating attributes.""" hass.states.async_set( @@ -378,7 +382,8 @@ async def test_direction_oscillating(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -async def test_state_missing_entity_id(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_state_missing_entity_id(hass: HomeAssistant) -> None: """Test we can still setup with a missing entity id.""" state = hass.states.get(FAN_GROUP) await hass.async_block_till_done() @@ -398,7 +403,8 @@ async def test_setup_before_started(hass: HomeAssistant) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_MISSING_FAN, 2)]) -async def test_reload(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_reload(hass: HomeAssistant) -> None: """Test the ability to reload fans.""" await hass.async_block_till_done() await hass.async_start() @@ -421,7 +427,8 @@ async def test_reload(hass: HomeAssistant, setup_comp) -> None: @pytest.mark.parametrize("config_count", [(CONFIG_FULL_SUPPORT, 2)]) -async def test_service_calls(hass: HomeAssistant, setup_comp) -> None: +@pytest.mark.usefixtures("setup_comp") +async def test_service_calls(hass: HomeAssistant) -> None: """Test calling services.""" await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True diff --git a/tests/components/group/test_init.py b/tests/components/group/test_init.py index 7434de74f63..bbbe22cba83 100644 --- a/tests/components/group/test_init.py +++ b/tests/components/group/test_init.py @@ -405,13 +405,13 @@ async def test_expand_entity_ids_does_not_return_duplicates( order=None, ) - assert ["light.bowl", "light.ceiling"] == sorted( + assert sorted( group.expand_entity_ids(hass, [test_group.entity_id, "light.Ceiling"]) - ) + ) == ["light.bowl", "light.ceiling"] - assert ["light.bowl", "light.ceiling"] == sorted( + assert sorted( 
group.expand_entity_ids(hass, ["light.bowl", test_group.entity_id]) - ) + ) == ["light.bowl", "light.ceiling"] async def test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: @@ -439,7 +439,7 @@ async def test_expand_entity_ids_recursive(hass: HomeAssistant) -> None: async def test_expand_entity_ids_ignores_non_strings(hass: HomeAssistant) -> None: """Test that non string elements in lists are ignored.""" - assert [] == group.expand_entity_ids(hass, [5, True]) + assert group.expand_entity_ids(hass, [5, True]) == [] async def test_get_entity_ids(hass: HomeAssistant) -> None: @@ -460,9 +460,10 @@ async def test_get_entity_ids(hass: HomeAssistant) -> None: order=None, ) - assert ["light.bowl", "light.ceiling"] == sorted( - group.get_entity_ids(hass, test_group.entity_id) - ) + assert sorted(group.get_entity_ids(hass, test_group.entity_id)) == [ + "light.bowl", + "light.ceiling", + ] async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: @@ -482,19 +483,19 @@ async def test_get_entity_ids_with_domain_filter(hass: HomeAssistant) -> None: order=None, ) - assert ["switch.ac"] == group.get_entity_ids( + assert group.get_entity_ids( hass, mixed_group.entity_id, domain_filter="switch" - ) + ) == ["switch.ac"] async def test_get_entity_ids_with_non_existing_group_name(hass: HomeAssistant) -> None: """Test get_entity_ids with a non existing group.""" - assert [] == group.get_entity_ids(hass, "non_existing") + assert group.get_entity_ids(hass, "non_existing") == [] async def test_get_entity_ids_with_non_group_state(hass: HomeAssistant) -> None: """Test get_entity_ids with a non group state.""" - assert [] == group.get_entity_ids(hass, "switch.AC") + assert group.get_entity_ids(hass, "switch.AC") == [] async def test_group_being_init_before_first_tracked_state_is_set_to_on( @@ -620,12 +621,12 @@ async def test_expand_entity_ids_expands_nested_groups(hass: HomeAssistant) -> N order=None, ) - assert [ + assert sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) == [ "light.test_1", "light.test_2", "switch.test_1", "switch.test_2", - ] == sorted(group.expand_entity_ids(hass, ["group.group_of_groups"])) + ] async def test_set_assumed_state_based_on_tracked(hass: HomeAssistant) -> None: diff --git a/tests/components/group/test_media_player.py b/tests/components/group/test_media_player.py index 451aae200b3..23cdd1598dd 100644 --- a/tests/components/group/test_media_player.py +++ b/tests/components/group/test_media_player.py @@ -1,14 +1,16 @@ """The tests for the Media group platform.""" import asyncio -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from homeassistant.components.group import DOMAIN from homeassistant.components.media_player import ( + ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_EXTRA, ATTR_MEDIA_SEEK_POSITION, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_TRACK, @@ -45,7 +47,7 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import entity_platform, entity_registry as er from homeassistant.setup import async_setup_component @@ -598,3 +600,59 @@ async def test_nested_group(hass: HomeAssistant) -> None: assert hass.states.get("media_player.kitchen").state == STATE_OFF assert hass.states.get("media_player.group_1").state == STATE_OFF assert hass.states.get("media_player.nested_group").state == STATE_OFF + + +async def 
test_service_play_media_kwargs(hass: HomeAssistant) -> None: + """Test that kwargs get passed through on play_media service call.""" + await async_setup_component( + hass, + MEDIA_DOMAIN, + { + MEDIA_DOMAIN: [ + {"platform": "demo"}, + { + "platform": DOMAIN, + "entities": [ + "media_player.bedroom", + "media_player.living_room", + ], + }, + ] + }, + ) + + await hass.async_block_till_done() + await hass.async_start() + await hass.async_block_till_done() + + platform = entity_platform.async_get_platforms(hass, "media_player")[0] + mp_bedroom = platform.domain_entities["media_player.bedroom"] + mp_bedroom.play_media = MagicMock() + + mp_living_room = platform.domain_entities["media_player.living_room"] + mp_living_room.play_media = MagicMock() + + await hass.services.async_call( + MEDIA_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.media_group", + ATTR_MEDIA_CONTENT_TYPE: "some_type", + ATTR_MEDIA_CONTENT_ID: "some_id", + ATTR_MEDIA_ANNOUNCE: "true", + ATTR_MEDIA_EXTRA: { + "volume": 20, + }, + }, + ) + await hass.async_block_till_done() + + assert mp_bedroom.play_media.call_count == 1 + mp_bedroom.play_media.assert_called_with( + "some_type", "some_id", announce=True, extra={"volume": 20} + ) + + assert mp_living_room.play_media.call_count == 1 + mp_living_room.play_media.assert_called_with( + "some_type", "some_id", announce=True, extra={"volume": 20} + ) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index dfd200a1542..2595b211dae 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -1,18 +1,44 @@ """The tests for the notify.group platform.""" -from collections.abc import Mapping +from collections.abc import Generator, Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, call, patch +import pytest + from homeassistant import config as hass_config from homeassistant.components import notify -from homeassistant.components.group import SERVICE_RELOAD +from homeassistant.components.group import DOMAIN, SERVICE_RELOAD +from homeassistant.components.notify import ( + ATTR_MESSAGE, + ATTR_TITLE, + DOMAIN as NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + NotifyEntity, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import MockPlatform, get_fixture_path, mock_platform +from tests.common import ( + MockConfigEntry, + MockEntity, + MockModule, + MockPlatform, + get_fixture_path, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) class MockNotifyPlatform(MockPlatform): @@ -217,3 +243,144 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: assert hass.services.has_service(notify.DOMAIN, "test_service2") assert not hass.services.has_service(notify.DOMAIN, "group_notify") assert hass.services.has_service(notify.DOMAIN, "new_group_notify") + + +class MockFlow(ConfigFlow): + """Test flow.""" + + +@pytest.fixture +def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: + """Mock config flow.""" + mock_platform(hass, "test.config_flow") + + with mock_config_flow("test", MockFlow): + yield + + +class MockNotifyEntity(MockEntity, NotifyEntity): + """Mock Email notifier entity to use 
in tests.""" + + def __init__(self, **values: Any) -> None: + """Initialize the mock entity.""" + super().__init__(**values) + self.send_message_mock_calls = MagicMock() + + async def async_send_message(self, message: str, title: str | None = None) -> None: + """Send a notification message.""" + self.send_message_mock_calls(message, title=title) + + +async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [Platform.NOTIFY] + ) + return True + + +async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry +) -> bool: + """Unload test config entry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.NOTIFY] + ) + + +@pytest.fixture +async def mock_notifiers( + hass: HomeAssistant, config_flow_fixture: None +) -> list[NotifyEntity]: + """Set up the notify entities.""" + entity = MockNotifyEntity(name="test", entity_id="notify.test") + entity2 = MockNotifyEntity(name="test2", entity_id="notify.test2") + entities = [entity, entity2] + test_entry = MockConfigEntry(domain="test") + test_entry.add_to_hass(hass) + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + setup_test_component_platform(hass, NOTIFY_DOMAIN, entities, from_config_entry=True) + assert await hass.config_entries.async_setup(test_entry.entry_id) + await hass.async_block_till_done() + return entities + + +async def test_notify_entity_group( + hass: HomeAssistant, mock_notifiers: list[NotifyEntity] +) -> None: + """Test sending a message to a notify group.""" + entity, entity2 = mock_notifiers + assert entity.send_message_mock_calls.call_count == 0 + assert entity2.send_message_mock_calls.call_count == 0 + + config_entry = MockConfigEntry( + domain=DOMAIN, + options={ + "group_type": "notify", + "name": "Test Group", + "entities": ["notify.test", "notify.test2"], + "hide_members": True, + }, + title="Test Group", + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + NOTIFY_DOMAIN, + SERVICE_SEND_MESSAGE, + { + ATTR_MESSAGE: "Hello", + ATTR_TITLE: "Test notification", + ATTR_ENTITY_ID: "notify.test_group", + }, + blocking=True, + ) + + assert entity.send_message_mock_calls.call_count == 1 + assert entity.send_message_mock_calls.call_args == call( + "Hello", title="Test notification" + ) + assert entity2.send_message_mock_calls.call_count == 1 + assert entity2.send_message_mock_calls.call_args == call( + "Hello", title="Test notification" + ) + + +async def test_state_reporting(hass: HomeAssistant) -> None: + """Test sending a message to a notify group.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + options={ + "group_type": "notify", + "name": "Test Group", + "entities": ["notify.test", "notify.test2"], + "hide_members": True, + }, + title="Test Group", + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE + + hass.states.async_set("notify.test", STATE_UNAVAILABLE) + hass.states.async_set("notify.test2", STATE_UNAVAILABLE) + await hass.async_block_till_done() + assert hass.states.get("notify.test_group").state == STATE_UNAVAILABLE + + 
hass.states.async_set("notify.test", "2021-01-01T23:59:59.123+00:00") + hass.states.async_set("notify.test2", "2021-01-01T23:59:59.123+00:00") + await hass.async_block_till_done() + assert hass.states.get("notify.test_group").state == STATE_UNKNOWN diff --git a/tests/components/guardian/conftest.py b/tests/components/guardian/conftest.py index 87ff96aff45..61813cb1df5 100644 --- a/tests/components/guardian/conftest.py +++ b/tests/components/guardian/conftest.py @@ -1,16 +1,18 @@ """Define fixtures for Elexa Guardian tests.""" -import json +from collections.abc import AsyncGenerator, Generator +from typing import Any from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.guardian import CONF_UID, DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -23,7 +25,9 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config, unique_id): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any], unique_id: str +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -35,7 +39,7 @@ def config_entry_fixture(hass, config, unique_id): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_IP_ADDRESS: "192.168.1.100", @@ -44,68 +48,68 @@ def config_fixture(hass): @pytest.fixture(name="data_sensor_pair_dump", scope="package") -def data_sensor_pair_dump_fixture(): +def data_sensor_pair_dump_fixture() -> JsonObjectType: """Define data from a successful sensor_pair_dump response.""" - return json.loads(load_fixture("sensor_pair_dump_data.json", "guardian")) + return load_json_object_fixture("sensor_pair_dump_data.json", "guardian") @pytest.fixture(name="data_sensor_pair_sensor", scope="package") -def data_sensor_pair_sensor_fixture(): +def data_sensor_pair_sensor_fixture() -> JsonObjectType: """Define data from a successful sensor_pair_sensor response.""" - return json.loads(load_fixture("sensor_pair_sensor_data.json", "guardian")) + return load_json_object_fixture("sensor_pair_sensor_data.json", "guardian") @pytest.fixture(name="data_sensor_paired_sensor_status", scope="package") -def data_sensor_paired_sensor_status_fixture(): +def data_sensor_paired_sensor_status_fixture() -> JsonObjectType: """Define data from a successful sensor_paired_sensor_status response.""" - return json.loads(load_fixture("sensor_paired_sensor_status_data.json", "guardian")) + return load_json_object_fixture("sensor_paired_sensor_status_data.json", "guardian") @pytest.fixture(name="data_system_diagnostics", scope="package") -def data_system_diagnostics_fixture(): +def data_system_diagnostics_fixture() -> JsonObjectType: """Define data from a successful system_diagnostics response.""" - return json.loads(load_fixture("system_diagnostics_data.json", "guardian")) + return load_json_object_fixture("system_diagnostics_data.json", "guardian") @pytest.fixture(name="data_system_onboard_sensor_status", scope="package") -def data_system_onboard_sensor_status_fixture(): +def data_system_onboard_sensor_status_fixture() -> JsonObjectType: 
"""Define data from a successful system_onboard_sensor_status response.""" - return json.loads( - load_fixture("system_onboard_sensor_status_data.json", "guardian") + return load_json_object_fixture( + "system_onboard_sensor_status_data.json", "guardian" ) @pytest.fixture(name="data_system_ping", scope="package") -def data_system_ping_fixture(): +def data_system_ping_fixture() -> JsonObjectType: """Define data from a successful system_ping response.""" - return json.loads(load_fixture("system_ping_data.json", "guardian")) + return load_json_object_fixture("system_ping_data.json", "guardian") @pytest.fixture(name="data_valve_status", scope="package") -def data_valve_status_fixture(): +def data_valve_status_fixture() -> JsonObjectType: """Define data from a successful valve_status response.""" - return json.loads(load_fixture("valve_status_data.json", "guardian")) + return load_json_object_fixture("valve_status_data.json", "guardian") @pytest.fixture(name="data_wifi_status", scope="package") -def data_wifi_status_fixture(): +def data_wifi_status_fixture() -> JsonObjectType: """Define data from a successful wifi_status response.""" - return json.loads(load_fixture("wifi_status_data.json", "guardian")) + return load_json_object_fixture("wifi_status_data.json", "guardian") @pytest.fixture(name="setup_guardian") async def setup_guardian_fixture( - hass, - config, - data_sensor_pair_dump, - data_sensor_pair_sensor, - data_sensor_paired_sensor_status, - data_system_diagnostics, - data_system_onboard_sensor_status, - data_system_ping, - data_valve_status, - data_wifi_status, -): + hass: HomeAssistant, + config: dict[str, Any], + data_sensor_pair_dump: JsonObjectType, + data_sensor_pair_sensor: JsonObjectType, + data_sensor_paired_sensor_status: JsonObjectType, + data_system_diagnostics: JsonObjectType, + data_system_onboard_sensor_status: JsonObjectType, + data_system_ping: JsonObjectType, + data_valve_status: JsonObjectType, + data_wifi_status: JsonObjectType, +) -> AsyncGenerator[None]: """Define a fixture to set up Guardian.""" with ( patch("aioguardian.client.Client.connect"), @@ -155,6 +159,6 @@ async def setup_guardian_fixture( @pytest.fixture(name="unique_id") -def unique_id_fixture(hass): +def unique_id_fixture() -> str: """Define a config entry unique ID fixture.""" return "guardian_3456" diff --git a/tests/components/guardian/test_config_flow.py b/tests/components/guardian/test_config_flow.py index 0f99578768a..6c06171a45f 100644 --- a/tests/components/guardian/test_config_flow.py +++ b/tests/components/guardian/test_config_flow.py @@ -1,6 +1,7 @@ """Define tests for the Elexa Guardian config flow.""" from ipaddress import ip_address +from typing import Any from unittest.mock import patch from aioguardian.errors import GuardianError @@ -22,9 +23,8 @@ from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -async def test_duplicate_error( - hass: HomeAssistant, config, config_entry, setup_guardian -) -> None: +@pytest.mark.usefixtures("config_entry", "setup_guardian") +async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that errors are shown when duplicate entries are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -33,7 +33,7 @@ async def test_duplicate_error( assert result["reason"] == "already_configured" -async def test_connect_error(hass: HomeAssistant, config) -> None: +async def test_connect_error(hass: HomeAssistant, config: 
dict[str, Any]) -> None: """Test that the config entry errors out if the device cannot connect.""" with patch( "aioguardian.client.Client.connect", @@ -58,7 +58,8 @@ async def test_get_pin_from_uid() -> None: assert pin == "3456" -async def test_step_user(hass: HomeAssistant, config, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test the user step.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} @@ -78,7 +79,8 @@ async def test_step_user(hass: HomeAssistant, config, setup_guardian) -> None: } -async def test_step_zeroconf(hass: HomeAssistant, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_zeroconf(hass: HomeAssistant) -> None: """Test the zeroconf step.""" zeroconf_data = zeroconf.ZeroconfServiceInfo( ip_address=ip_address("192.168.1.100"), @@ -133,7 +135,8 @@ async def test_step_zeroconf_already_in_progress(hass: HomeAssistant) -> None: assert result["reason"] == "already_in_progress" -async def test_step_dhcp(hass: HomeAssistant, setup_guardian) -> None: +@pytest.mark.usefixtures("setup_guardian") +async def test_step_dhcp(hass: HomeAssistant) -> None: """Test the dhcp step.""" dhcp_data = dhcp.DhcpServiceInfo( ip="192.168.1.100", diff --git a/tests/components/guardian/test_diagnostics.py b/tests/components/guardian/test_diagnostics.py index 02b620b8e01..3b3ed21bc65 100644 --- a/tests/components/guardian/test_diagnostics.py +++ b/tests/components/guardian/test_diagnostics.py @@ -4,15 +4,16 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.guardian import DOMAIN, GuardianData from homeassistant.core import HomeAssistant +from tests.common import ANY, MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_guardian, + setup_guardian: None, # relies on config_entry fixture ) -> None: """Test config entry diagnostics.""" data: GuardianData = hass.data[DOMAIN][config_entry.entry_id] @@ -38,6 +39,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "valve_controller": { diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 24c55c473b9..31c3a1fae39 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -14,7 +14,7 @@ from homeassistant.components.habitica.const import ( SERVICE_API_CALL, ) from homeassistant.const import ATTR_NAME -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import MockConfigEntry, async_capture_events from tests.test_util.aiohttp import AiohttpClientMocker @@ -24,13 +24,13 @@ TEST_USER_NAME = "test_user" @pytest.fixture -def capture_api_call_success(hass): +def capture_api_call_success(hass: HomeAssistant) -> list[Event]: """Capture api_call events.""" return async_capture_events(hass, EVENT_API_CALL_SUCCESS) @pytest.fixture -def habitica_entry(hass): +def habitica_entry(hass: HomeAssistant) -> MockConfigEntry: """Test entry for the following tests.""" entry = MockConfigEntry( domain=DOMAIN, @@ -88,6 +88,19 @@ def common_requests(aioclient_mock: 
AiohttpClientMocker) -> AiohttpClientMocker: ] }, ) + aioclient_mock.get( + "https://habitica.com/api/v3/tasks/user?type=completedTodos", + json={ + "data": [ + { + "text": "this is a mock todo #5", + "id": 5, + "type": "todo", + "completed": True, + } + ] + }, + ) aioclient_mock.post( "https://habitica.com/api/v3/tasks/user", @@ -98,8 +111,9 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: return aioclient_mock +@pytest.mark.usefixtures("common_requests") async def test_entry_setup_unload( - hass: HomeAssistant, habitica_entry, common_requests + hass: HomeAssistant, habitica_entry: MockConfigEntry ) -> None: """Test integration setup and unload.""" assert await hass.config_entries.async_setup(habitica_entry.entry_id) @@ -112,8 +126,11 @@ async def test_entry_setup_unload( assert not hass.services.has_service(DOMAIN, SERVICE_API_CALL) +@pytest.mark.usefixtures("common_requests") async def test_service_call( - hass: HomeAssistant, habitica_entry, common_requests, capture_api_call_success + hass: HomeAssistant, + habitica_entry: MockConfigEntry, + capture_api_call_success: list[Event], ) -> None: """Test integration setup, service call and unload.""" diff --git a/tests/components/harmony/conftest.py b/tests/components/harmony/conftest.py index fb4be73aa72..759770e9746 100644 --- a/tests/components/harmony/conftest.py +++ b/tests/components/harmony/conftest.py @@ -1,10 +1,10 @@ """Fixtures for harmony tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aioharmony.const import ClientCallbackType import pytest -from typing_extensions import Generator from homeassistant.components.harmony.const import ACTIVITY_POWER_OFF, DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME diff --git a/tests/components/hassio/conftest.py b/tests/components/hassio/conftest.py index 7b79dfe6179..db1a07c4df3 100644 --- a/tests/components/hassio/conftest.py +++ b/tests/components/hassio/conftest.py @@ -1,5 +1,6 @@ """Fixtures for Hass.io.""" +from collections.abc import Generator import os import re from unittest.mock import Mock, patch @@ -7,6 +8,7 @@ from unittest.mock import Mock, patch from aiohttp.test_utils import TestClient import pytest +from homeassistant.auth.models import RefreshToken from homeassistant.components.hassio.handler import HassIO, HassioAPIError from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -19,7 +21,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def disable_security_filter(): +def disable_security_filter() -> Generator[None]: """Disable the security filter to ensure the integration is secure.""" with patch( "homeassistant.components.http.security_filter.FILTERS", @@ -29,7 +31,7 @@ def disable_security_filter(): @pytest.fixture -def hassio_env(): +def hassio_env() -> Generator[None]: """Fixture to inject hassio env.""" with ( patch.dict(os.environ, {"SUPERVISOR": "127.0.0.1"}), @@ -48,11 +50,11 @@ def hassio_env(): @pytest.fixture def hassio_stubs( - hassio_env, + hassio_env: None, hass: HomeAssistant, hass_client: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, -): +) -> RefreshToken: """Create mock hassio http client.""" with ( patch( @@ -86,7 +88,7 @@ def hassio_stubs( @pytest.fixture def hassio_client( - hassio_stubs, hass: HomeAssistant, hass_client: ClientSessionGenerator + hassio_stubs: RefreshToken, hass: HomeAssistant, hass_client: 
ClientSessionGenerator ) -> TestClient: """Return a Hass.io HTTP client.""" return hass.loop.run_until_complete(hass_client()) @@ -94,7 +96,9 @@ def hassio_client( @pytest.fixture def hassio_noauth_client( - hassio_stubs, hass: HomeAssistant, aiohttp_client: ClientSessionGenerator + hassio_stubs: RefreshToken, + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, ) -> TestClient: """Return a Hass.io HTTP client without auth.""" return hass.loop.run_until_complete(aiohttp_client(hass.http.app)) @@ -102,7 +106,9 @@ def hassio_noauth_client( @pytest.fixture async def hassio_client_supervisor( - hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, hassio_stubs + hass: HomeAssistant, + aiohttp_client: ClientSessionGenerator, + hassio_stubs: RefreshToken, ) -> TestClient: """Return an authenticated HTTP client.""" access_token = hass.auth.async_create_access_token(hassio_stubs) @@ -113,7 +119,9 @@ async def hassio_client_supervisor( @pytest.fixture -async def hassio_handler(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker): +def hassio_handler( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> Generator[HassIO]: """Create mock hassio handler.""" with patch.dict(os.environ, {"SUPERVISOR_TOKEN": SUPERVISOR_TOKEN}): yield HassIO(hass.loop, async_get_clientsession(hass), "127.0.0.1") diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 55c663d66cc..6a20c6eec88 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -3,12 +3,12 @@ from __future__ import annotations import asyncio +from collections.abc import Generator import logging from typing import Any from unittest.mock import AsyncMock, call, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio.addon_manager import ( AddonError, diff --git a/tests/components/hassio/test_addon_panel.py b/tests/components/hassio/test_addon_panel.py index 8436b3393b9..f7407152f7e 100644 --- a/tests/components/hassio/test_addon_panel.py +++ b/tests/components/hassio/test_addon_panel.py @@ -24,8 +24,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: ) +@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_startup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_env + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test startup and panel setup after event.""" aioclient_mock.get( @@ -68,10 +69,10 @@ async def test_hassio_addon_panel_startup( ) +@pytest.mark.usefixtures("hassio_env") async def test_hassio_addon_panel_api( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - hassio_env, hass_client: ClientSessionGenerator, ) -> None: """Test panel api after event.""" diff --git a/tests/components/hassio/test_auth.py b/tests/components/hassio/test_auth.py index 175d9061d56..ad96b58e99d 100644 --- a/tests/components/hassio/test_auth.py +++ b/tests/components/hassio/test_auth.py @@ -3,11 +3,12 @@ from http import HTTPStatus from unittest.mock import Mock, patch +from aiohttp.test_utils import TestClient + from homeassistant.auth.providers.homeassistant import InvalidAuth -from homeassistant.core import HomeAssistant -async def test_auth_success(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_auth_success(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -23,7 +24,7 @@ async def test_auth_success(hass: HomeAssistant, hassio_client_supervisor) -> No mock_login.assert_called_with("test", "123456") -async def test_auth_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: +async def test_auth_fails_no_supervisor(hassio_client: TestClient) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -39,7 +40,7 @@ async def test_auth_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> N assert not mock_login.called -async def test_auth_fails_no_auth(hass: HomeAssistant, hassio_noauth_client) -> None: +async def test_auth_fails_no_auth(hassio_noauth_client: TestClient) -> None: """Test if only supervisor can access.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -55,7 +56,7 @@ async def test_auth_fails_no_auth(hass: HomeAssistant, hassio_noauth_client) -> assert not mock_login.called -async def test_login_error(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_error(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -72,7 +73,7 @@ async def test_login_error(hass: HomeAssistant, hassio_client_supervisor) -> Non mock_login.assert_called_with("test", "123456") -async def test_login_no_data(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_no_data(hassio_client_supervisor: TestClient) -> None: """Test auth with no data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -86,7 +87,7 @@ async def test_login_no_data(hass: HomeAssistant, hassio_client_supervisor) -> N assert not mock_login.called -async def test_login_no_username(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_login_no_username(hassio_client_supervisor: TestClient) -> None: """Test auth with no username in data -> error.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -102,9 +103,7 @@ async def test_login_no_username(hass: HomeAssistant, hassio_client_supervisor) assert not mock_login.called -async def test_login_success_extra( - hass: HomeAssistant, hassio_client_supervisor -) -> None: +async def test_login_success_extra(hassio_client_supervisor: TestClient) -> None: """Test auth with extra data.""" with patch( "homeassistant.auth.providers.homeassistant." @@ -125,7 +124,7 @@ async def test_login_success_extra( mock_login.assert_called_with("test", "123456") -async def test_password_success(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_password_success(hassio_client_supervisor: TestClient) -> None: """Test no auth needed for .""" with patch( "homeassistant.auth.providers.homeassistant." 
@@ -141,7 +140,7 @@ async def test_password_success(hass: HomeAssistant, hassio_client_supervisor) - mock_change.assert_called_with("test", "123456") -async def test_password_fails_no_supervisor(hass: HomeAssistant, hassio_client) -> None: +async def test_password_fails_no_supervisor(hassio_client: TestClient) -> None: """Test if only supervisor can access.""" resp = await hassio_client.post( "/api/hassio_auth/password_reset", @@ -152,9 +151,7 @@ async def test_password_fails_no_supervisor(hass: HomeAssistant, hassio_client) assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_fails_no_auth( - hass: HomeAssistant, hassio_noauth_client -) -> None: +async def test_password_fails_no_auth(hassio_noauth_client: TestClient) -> None: """Test if only supervisor can access.""" resp = await hassio_noauth_client.post( "/api/hassio_auth/password_reset", @@ -165,7 +162,7 @@ async def test_password_fails_no_auth( assert resp.status == HTTPStatus.UNAUTHORIZED -async def test_password_no_user(hass: HomeAssistant, hassio_client_supervisor) -> None: +async def test_password_no_user(hassio_client_supervisor: TestClient) -> None: """Test changing password for invalid user.""" resp = await hassio_client_supervisor.post( "/api/hassio_auth/password_reset", diff --git a/tests/components/hassio/test_discovery.py b/tests/components/hassio/test_discovery.py index 0783ee77932..305b863b3af 100644 --- a/tests/components/hassio/test_discovery.py +++ b/tests/components/hassio/test_discovery.py @@ -1,8 +1,10 @@ """Test config flow.""" +from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch +from aiohttp.test_utils import TestClient import pytest from homeassistant import config_entries @@ -18,7 +20,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="mock_mqtt") -async def mock_mqtt_fixture(hass): +def mock_mqtt_fixture( + hass: HomeAssistant, +) -> Generator[type[config_entries.ConfigFlow]]: """Mock the MQTT integration's config flow.""" mock_integration(hass, MockModule(MQTT_DOMAIN)) mock_platform(hass, f"{MQTT_DOMAIN}.config_flow", None) @@ -34,8 +38,11 @@ async def mock_mqtt_fixture(hass): yield MqttFlow +@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test startup and discovery after event.""" aioclient_mock.get( @@ -90,8 +97,11 @@ async def test_hassio_discovery_startup( ) +@pytest.mark.usefixtures("hassio_client") async def test_hassio_discovery_startup_done( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test startup and discovery with hass discovery.""" aioclient_mock.post( @@ -159,7 +169,10 @@ async def test_hassio_discovery_startup_done( async def test_hassio_discovery_webhook( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hassio_client, mock_mqtt + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hassio_client: TestClient, + mock_mqtt: type[config_entries.ConfigFlow], ) -> None: """Test discovery webhook.""" aioclient_mock.get( diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index c418576a802..c5fa6ff8254 100644 --- 
a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -365,8 +365,9 @@ async def test_api_headers( assert received_request.headers[hdrs.CONTENT_TYPE] == "application/octet-stream" +@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_green_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -389,8 +390,9 @@ async def test_api_get_green_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_green_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -407,8 +409,9 @@ async def test_api_set_green_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_get_yellow_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.get( @@ -427,8 +430,9 @@ async def test_api_get_yellow_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_set_yellow_settings( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -445,8 +449,9 @@ async def test_api_set_yellow_settings( assert aioclient_mock.call_count == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_api_reboot_host( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test setup with API ping.""" aioclient_mock.post( @@ -458,7 +463,8 @@ async def test_api_reboot_host( assert aioclient_mock.call_count == 1 -async def test_send_command_invalid_command(hass: HomeAssistant, hassio_stubs) -> None: +@pytest.mark.usefixtures("hassio_stubs") +async def test_send_command_invalid_command(hass: HomeAssistant) -> None: """Test send command fails when command is invalid.""" hassio: HassIO = hass.data["hassio"] with pytest.raises(HassioAPIError): diff --git a/tests/components/hassio/test_http.py b/tests/components/hassio/test_http.py index a5ffb4f0d83..404c047a56c 100644 --- a/tests/components/hassio/test_http.py +++ b/tests/components/hassio/test_http.py @@ -1,9 +1,11 @@ """The tests for the hassio component.""" +from collections.abc import Generator from http import HTTPStatus from unittest.mock import patch from aiohttp import StreamReader +from aiohttp.test_utils import TestClient import pytest from tests.common import MockUser @@ -11,7 +13,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture -def mock_not_onboarded(): +def mock_not_onboarded() -> Generator[None]: """Mock that we're not onboarded.""" with patch( "homeassistant.components.hassio.http.async_is_onboarded", return_value=False @@ -20,7 +22,9 @@ def mock_not_onboarded(): @pytest.fixture -def hassio_user_client(hassio_client, hass_admin_user: MockUser): +def hassio_user_client( + hassio_client: TestClient, hass_admin_user: MockUser +) -> TestClient: """Return a Hass.io HTTP client tied to a non-admin user.""" hass_admin_user.groups = [] return hassio_client @@ -35,7 +39,7 @@ def 
hassio_user_client(hassio_client, hass_admin_user: MockUser): ], ) async def test_forward_request_onboarded_user_get( - hassio_user_client, aioclient_mock: AiohttpClientMocker, path: str + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -55,7 +59,7 @@ async def test_forward_request_onboarded_user_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_user_unallowed_methods( - hassio_user_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_user_client.post("/api/hassio/app/entrypoint.js") @@ -82,7 +86,7 @@ async def test_forward_request_onboarded_user_unallowed_methods( ], ) async def test_forward_request_onboarded_user_unallowed_paths( - hassio_user_client, + hassio_user_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -105,7 +109,7 @@ async def test_forward_request_onboarded_user_unallowed_paths( ], ) async def test_forward_request_onboarded_noauth_get( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, path: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str ) -> None: """Test fetching normal path.""" aioclient_mock.get(f"http://127.0.0.1/{path}", text="response") @@ -125,7 +129,7 @@ async def test_forward_request_onboarded_noauth_get( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_onboarded_noauth_unallowed_methods( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -152,7 +156,7 @@ async def test_forward_request_onboarded_noauth_unallowed_methods( ], ) async def test_forward_request_onboarded_noauth_unallowed_paths( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -176,7 +180,7 @@ async def test_forward_request_onboarded_noauth_unallowed_paths( ], ) async def test_forward_request_not_onboarded_get( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -212,7 +216,7 @@ async def test_forward_request_not_onboarded_get( ], ) async def test_forward_request_not_onboarded_post( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, mock_not_onboarded, @@ -238,7 +242,7 @@ async def test_forward_request_not_onboarded_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_not_onboarded_unallowed_methods( - hassio_noauth_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_noauth_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_noauth_client.post("/api/hassio/app/entrypoint.js") @@ -265,7 +269,7 @@ async def test_forward_request_not_onboarded_unallowed_methods( ], ) async def test_forward_request_not_onboarded_unallowed_paths( - hassio_noauth_client, + hassio_noauth_client: TestClient, aioclient_mock: 
AiohttpClientMocker, bad_path: str, expected_status: int, @@ -294,7 +298,7 @@ async def test_forward_request_not_onboarded_unallowed_paths( ], ) async def test_forward_request_admin_get( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, authenticated: bool, @@ -329,7 +333,7 @@ async def test_forward_request_admin_get( ], ) async def test_forward_request_admin_post( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, path: str, ) -> None: @@ -354,7 +358,7 @@ async def test_forward_request_admin_post( @pytest.mark.parametrize("method", ["POST", "PUT", "DELETE", "RANDOM"]) async def test_forward_request_admin_unallowed_methods( - hassio_client, aioclient_mock: AiohttpClientMocker, method: str + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, method: str ) -> None: """Test fetching normal path.""" resp = await hassio_client.post("/api/hassio/app/entrypoint.js") @@ -379,7 +383,7 @@ async def test_forward_request_admin_unallowed_methods( ], ) async def test_forward_request_admin_unallowed_paths( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, bad_path: str, expected_status: int, @@ -394,7 +398,7 @@ async def test_forward_request_admin_unallowed_paths( async def test_bad_gateway_when_cannot_find_supervisor( - hassio_client, aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Test we get a bad gateway error if we can't find supervisor.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js", exc=TimeoutError) @@ -404,9 +408,8 @@ async def test_bad_gateway_when_cannot_find_supervisor( async def test_backup_upload_headers( - hassio_client, + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, - caplog: pytest.LogCaptureFixture, mock_not_onboarded, ) -> None: """Test that we forward the full header for backup upload.""" @@ -427,7 +430,7 @@ async def test_backup_upload_headers( async def test_backup_download_headers( - hassio_client, aioclient_mock: AiohttpClientMocker, mock_not_onboarded + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker, mock_not_onboarded ) -> None: """Test that we forward the full header for backup download.""" content_disposition = "attachment; filename=test.tar" @@ -449,7 +452,9 @@ async def test_backup_download_headers( assert resp.headers["Content-Disposition"] == content_disposition -async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None: +async def test_stream( + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker +) -> None: """Verify that the request is a stream.""" content_type = "multipart/form-data; boundary='--webkit'" aioclient_mock.post("http://127.0.0.1/backups/new/upload") @@ -462,7 +467,7 @@ async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> Non async def test_simple_get_no_stream( - hassio_client, aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Verify that a simple GET request is not a stream.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") @@ -472,7 +477,7 @@ async def test_simple_get_no_stream( async def test_entrypoint_cache_control( - hassio_client, aioclient_mock: AiohttpClientMocker + hassio_client: TestClient, aioclient_mock: AiohttpClientMocker ) -> None: """Test that we return cache control for requests to the entrypoint only.""" aioclient_mock.get("http://127.0.0.1/app/entrypoint.js") diff --git 
a/tests/components/hassio/test_init.py b/tests/components/hassio/test_init.py index 0246b557ee4..d71e8acfbe0 100644 --- a/tests/components/hassio/test_init.py +++ b/tests/components/hassio/test_init.py @@ -486,7 +486,8 @@ async def test_warn_when_cannot_connect( assert "Not connected with the supervisor / system too busy!" in caplog.text -async def test_service_register(hassio_env, hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("hassio_env") +async def test_service_register(hass: HomeAssistant) -> None: """Check if service will be setup.""" assert await async_setup_component(hass, "hassio", {}) assert hass.services.has_service("hassio", "addon_start") @@ -717,8 +718,9 @@ async def test_addon_service_call_with_complex_slug( await hass.services.async_call("hassio", "addon_start", {"addon": "test.a_1-2"}) +@pytest.mark.usefixtures("hassio_env") async def test_service_calls_core( - hassio_env, hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Call core service and check the API calls behind that.""" assert await async_setup_component(hass, "homeassistant", {}) @@ -1116,8 +1118,9 @@ async def test_setup_hardware_integration( assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("hassio_stubs") async def test_get_store_addon_info( - hass: HomeAssistant, hassio_stubs, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test get store add-on info from Supervisor API.""" aioclient_mock.clear_requests() diff --git a/tests/components/hassio/test_issues.py b/tests/components/hassio/test_issues.py index ff0e4a8dd92..1a3d3d83f95 100644 --- a/tests/components/hassio/test_issues.py +++ b/tests/components/hassio/test_issues.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from datetime import timedelta from http import HTTPStatus import os @@ -22,13 +23,13 @@ from tests.typing import WebSocketGenerator @pytest.fixture(autouse=True) -async def setup_repairs(hass): +async def setup_repairs(hass: HomeAssistant) -> None: """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @pytest.fixture(autouse=True) -async def fixture_supervisor_environ(): +def fixture_supervisor_environ() -> Generator[None]: """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield @@ -40,7 +41,7 @@ def mock_resolution_info( unhealthy: list[str] | None = None, issues: list[dict[str, str]] | None = None, suggestion_result: str = "ok", -): +) -> None: """Mock resolution/info endpoint with unsupported/unhealthy reasons and/or issues.""" aioclient_mock.get( "http://127.0.0.1/resolution/info", @@ -80,7 +81,9 @@ def mock_resolution_info( ) -def assert_repair_in_list(issues: list[dict[str, Any]], unhealthy: bool, reason: str): +def assert_repair_in_list( + issues: list[dict[str, Any]], unhealthy: bool, reason: str +) -> None: """Assert repair for unhealthy/unsupported in list.""" repair_type = "unhealthy" if unhealthy else "unsupported" assert { @@ -108,7 +111,7 @@ def assert_issue_repair_in_list( *, reference: str | None = None, placeholders: dict[str, str] | None = None, -): +) -> None: """Assert repair for unhealthy/unsupported in list.""" if reference: placeholders = (placeholders or {}) | {"reference": reference} @@ -128,11 +131,11 @@ def assert_issue_repair_in_list( } in issues +@pytest.mark.usefixtures("all_setup_requests") async def 
test_unhealthy_issues( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test issues added for unhealthy systems.""" mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) @@ -150,11 +153,11 @@ async def test_unhealthy_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="setup") +@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test issues added for unsupported systems.""" mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) @@ -174,11 +177,11 @@ async def test_unsupported_issues( assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") +@pytest.mark.usefixtures("all_setup_requests") async def test_unhealthy_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test unhealthy issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -231,11 +234,11 @@ async def test_unhealthy_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_unsupported_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test unsupported issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -288,11 +291,11 @@ async def test_unsupported_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_reset_issues_supervisor_restart( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """All issues reset on supervisor restart.""" mock_resolution_info( @@ -352,11 +355,11 @@ async def test_reset_issues_supervisor_restart( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_reasons_added_and_removed( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test an unsupported/unhealthy reasons being added and removed at same time.""" mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) @@ -402,11 +405,11 @@ async def test_reasons_added_and_removed( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_ignored_unsupported_skipped( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Unsupported reasons which have an identical unhealthy reason are ignored.""" mock_resolution_info( @@ -425,11 +428,11 @@ async def test_ignored_unsupported_skipped( assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="privileged") +@pytest.mark.usefixtures("all_setup_requests") async def test_new_unsupported_unhealthy_reason( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """New unsupported/unhealthy reasons result in a generic repair until next core update.""" mock_resolution_info( @@ -475,11 +478,11 @@ async def test_new_unsupported_unhealthy_reason( } in msg["result"]["issues"] +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues( 
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test repairs added for supervisor issue.""" mock_resolution_info( @@ -541,12 +544,12 @@ async def test_supervisor_issues( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_initial_failure( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, - all_setup_requests, ) -> None: """Test issues manager retries after initial update failure.""" responses = [ @@ -619,11 +622,11 @@ async def test_supervisor_issues_initial_failure( assert len(msg["result"]["issues"]) == 1 +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_add_remove( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test supervisor issues added and removed from dispatches.""" mock_resolution_info(aioclient_mock) @@ -730,11 +733,11 @@ async def test_supervisor_issues_add_remove( assert msg["result"] == {"issues": []} +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_suggestions_fail( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test failing to get suggestions for issue skips it.""" aioclient_mock.get( @@ -776,11 +779,11 @@ async def test_supervisor_issues_suggestions_fail( assert len(msg["result"]["issues"]) == 0 +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_remove_missing_issue_without_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test HA skips message to remove issue that it didn't know about (sync issue).""" mock_resolution_info(aioclient_mock) @@ -810,11 +813,11 @@ async def test_supervisor_remove_missing_issue_without_error( await hass.async_block_till_done() +@pytest.mark.usefixtures("all_setup_requests") async def test_system_is_not_ready( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - all_setup_requests, ) -> None: """Ensure hassio starts despite error.""" aioclient_mock.get( @@ -832,11 +835,11 @@ async def test_system_is_not_ready( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issues_detached_addon_missing( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_ws_client: WebSocketGenerator, - all_setup_requests, ) -> None: """Test supervisor issue for detached addon due to missing repository.""" mock_resolution_info(aioclient_mock) diff --git a/tests/components/hassio/test_repairs.py b/tests/components/hassio/test_repairs.py index 8d0bbfac87c..907529ec9c4 100644 --- a/tests/components/hassio/test_repairs.py +++ b/tests/components/hassio/test_repairs.py @@ -1,5 +1,6 @@ """Test supervisor repairs.""" +from collections.abc import Generator from http import HTTPStatus import os from unittest.mock import patch @@ -18,18 +19,18 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -async def fixture_supervisor_environ(): +def fixture_supervisor_environ() -> Generator[None]: """Mock os environ for supervisor.""" with patch.dict(os.environ, MOCK_ENVIRON): yield +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow( hass: 
HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( @@ -103,12 +104,12 @@ async def test_supervisor_issue_repair_flow( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue with multiple suggestions.""" mock_resolution_info( @@ -197,12 +198,12 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confirmation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue with multiple suggestions and choice requires confirmation.""" mock_resolution_info( @@ -310,12 +311,12 @@ async def test_supervisor_issue_repair_flow_with_multiple_suggestions_and_confir ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_skip_confirmation( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test confirmation skipped for fix flow for supervisor issue with one suggestion.""" mock_resolution_info( @@ -389,12 +390,12 @@ async def test_supervisor_issue_repair_flow_skip_confirmation( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow_error( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test repair flow fails when repair fails to apply.""" mock_resolution_info( @@ -461,12 +462,12 @@ async def test_mount_failed_repair_flow_error( assert issue_registry.async_get_issue(domain="hassio", issue_id="1234") +@pytest.mark.usefixtures("all_setup_requests") async def test_mount_failed_repair_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test repair flow for mount_failed issue.""" mock_resolution_info( @@ -562,12 +563,12 @@ async def test_mount_failed_repair_flow( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_docker_config_repair_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( @@ -669,12 +670,12 @@ async def test_supervisor_issue_docker_config_repair_flow( ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_repair_flow_multiple_data_disks( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for multiple data disks supervisor issue.""" mock_resolution_info( @@ -785,12 +786,12 @@ async def 
test_supervisor_issue_repair_flow_multiple_data_disks( @pytest.mark.parametrize( "all_setup_requests", [{"include_addons": True}], indirect=True ) +@pytest.mark.usefixtures("all_setup_requests") async def test_supervisor_issue_detached_addon_removed( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, hass_client: ClientSessionGenerator, issue_registry: ir.IssueRegistry, - all_setup_requests, ) -> None: """Test fix flow for supervisor issue.""" mock_resolution_info( diff --git a/tests/components/hassio/test_websocket_api.py b/tests/components/hassio/test_websocket_api.py index f3be391d9b7..7d8f07bfaec 100644 --- a/tests/components/hassio/test_websocket_api.py +++ b/tests/components/hassio/test_websocket_api.py @@ -79,8 +79,9 @@ def mock_all(aioclient_mock: AiohttpClientMocker) -> None: ) +@pytest.mark.usefixtures("hassio_env") async def test_ws_subscription( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test websocket subscription.""" assert await async_setup_component(hass, "hassio", {}) @@ -116,8 +117,8 @@ async def test_ws_subscription( assert response["success"] +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -160,8 +161,8 @@ async def test_websocket_supervisor_api( } +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -189,8 +190,8 @@ async def test_websocket_supervisor_api_error( assert msg["error"]["message"] == "example error" +@pytest.mark.usefixtures("hassio_env") async def test_websocket_supervisor_api_error_without_msg( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, @@ -218,8 +219,8 @@ async def test_websocket_supervisor_api_error_without_msg( assert msg["error"]["message"] == "" +@pytest.mark.usefixtures("hassio_env") async def test_websocket_non_admin_user( - hassio_env, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, diff --git a/tests/components/hdmi_cec/conftest.py b/tests/components/hdmi_cec/conftest.py index 0756ea639b7..058525f2448 100644 --- a/tests/components/hdmi_cec/conftest.py +++ b/tests/components/hdmi_cec/conftest.py @@ -1,16 +1,22 @@ """Tests for the HDMI-CEC component.""" -from unittest.mock import patch +from collections.abc import Callable, Coroutine, Generator +from typing import Any +from unittest.mock import MagicMock, patch import pytest from homeassistant.components.hdmi_cec import DOMAIN from homeassistant.const import EVENT_HOMEASSISTANT_START +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +type CecEntityCreator = Callable[..., Coroutine[Any, Any, None]] +type HDMINetworkCreator = Callable[..., Coroutine[Any, Any, MagicMock]] + @pytest.fixture(name="mock_cec_adapter", autouse=True) -def mock_cec_adapter_fixture(): +def mock_cec_adapter_fixture() -> Generator[MagicMock]: """Mock CecAdapter. Always mocked as it imports the `cec` library which is part of `libcec`. 
@@ -22,7 +28,7 @@ def mock_cec_adapter_fixture(): @pytest.fixture(name="mock_hdmi_network") -def mock_hdmi_network_fixture(): +def mock_hdmi_network_fixture() -> Generator[MagicMock]: """Mock HDMINetwork.""" with patch( "homeassistant.components.hdmi_cec.HDMINetwork", autospec=True @@ -31,7 +37,9 @@ def mock_hdmi_network_fixture(): @pytest.fixture -def create_hdmi_network(hass, mock_hdmi_network): +def create_hdmi_network( + hass: HomeAssistant, mock_hdmi_network: MagicMock +) -> HDMINetworkCreator: """Create an initialized mock hdmi_network.""" async def hdmi_network(config=None): @@ -49,7 +57,7 @@ def create_hdmi_network(hass, mock_hdmi_network): @pytest.fixture -def create_cec_entity(hass): +def create_cec_entity(hass: HomeAssistant) -> CecEntityCreator: """Create a CecEntity.""" async def cec_entity(hdmi_network, device): diff --git a/tests/components/hdmi_cec/test_init.py b/tests/components/hdmi_cec/test_init.py index 1263078c196..1b1861b0ef8 100644 --- a/tests/components/hdmi_cec/test_init.py +++ b/tests/components/hdmi_cec/test_init.py @@ -1,7 +1,9 @@ """Tests for the HDMI-CEC component.""" +from collections.abc import Generator from datetime import timedelta -from unittest.mock import ANY, PropertyMock, call, patch +from typing import Any +from unittest.mock import ANY, MagicMock, PropertyMock, call, patch import pytest import voluptuous as vol @@ -28,6 +30,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow from . import assert_key_press_release +from .conftest import HDMINetworkCreator from tests.common import ( MockEntity, @@ -38,7 +41,7 @@ from tests.common import ( @pytest.fixture(name="mock_tcp_adapter") -def mock_tcp_adapter_fixture(): +def mock_tcp_adapter_fixture() -> Generator[MagicMock]: """Mock TcpAdapter.""" with patch( "homeassistant.components.hdmi_cec.TcpAdapter", autospec=True @@ -88,7 +91,9 @@ def mock_tcp_adapter_fixture(): ), ], ) -def test_parse_mapping_physical_address(mapping, expected) -> None: +def test_parse_mapping_physical_address( + mapping: dict[str, Any], expected: list[tuple[str, list[int]]] +) -> None: """Test the device config mapping function.""" result = parse_mapping(mapping) result = [ @@ -101,7 +106,7 @@ def test_parse_mapping_physical_address(mapping, expected) -> None: async def test_setup_cec_adapter( - hass: HomeAssistant, mock_cec_adapter, mock_hdmi_network + hass: HomeAssistant, mock_cec_adapter: MagicMock, mock_hdmi_network: MagicMock ) -> None: """Test the general setup of this component.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -125,7 +130,7 @@ async def test_setup_cec_adapter( @pytest.mark.parametrize("osd_name", ["test", "test_a_long_name"]) async def test_setup_set_osd_name( - hass: HomeAssistant, osd_name, mock_cec_adapter + hass: HomeAssistant, osd_name: str, mock_cec_adapter: MagicMock ) -> None: """Test the setup of this component with the `osd_name` config setting.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {"osd_name": osd_name}}) @@ -134,7 +139,7 @@ async def test_setup_set_osd_name( async def test_setup_tcp_adapter( - hass: HomeAssistant, mock_tcp_adapter, mock_hdmi_network + hass: HomeAssistant, mock_tcp_adapter: MagicMock, mock_hdmi_network: MagicMock ) -> None: """Test the setup of this component with the TcpAdapter (`host` config setting).""" host = "0.0.0.0" @@ -161,7 +166,9 @@ async def test_setup_tcp_adapter( # Test services -async def test_service_power_on(hass: HomeAssistant, create_hdmi_network) -> None: +async def 
test_service_power_on( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the power on service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -175,7 +182,9 @@ async def test_service_power_on(hass: HomeAssistant, create_hdmi_network) -> Non mock_hdmi_network_instance.power_on.assert_called_once_with() -async def test_service_standby(hass: HomeAssistant, create_hdmi_network) -> None: +async def test_service_standby( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the standby service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -190,7 +199,7 @@ async def test_service_standby(hass: HomeAssistant, create_hdmi_network) -> None async def test_service_select_device_alias( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator ) -> None: """Test the select device service call with a known alias.""" mock_hdmi_network_instance = await create_hdmi_network( @@ -220,7 +229,7 @@ class MockCecEntity(MockEntity): async def test_service_select_device_entity( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator ) -> None: """Test the select device service call with an existing entity.""" platform = MockEntityPlatform(hass) @@ -244,7 +253,7 @@ async def test_service_select_device_entity( async def test_service_select_device_physical_address( - hass: HomeAssistant, create_hdmi_network + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator ) -> None: """Test the select device service call with a raw physical address.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -262,7 +271,9 @@ async def test_service_select_device_physical_address( assert str(physical_address) == "1.1.0.0" -async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network) -> None: +async def test_service_update_devices( + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator +) -> None: """Test the update devices service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -295,11 +306,11 @@ async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network) @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_x_times( hass: HomeAssistant, - create_hdmi_network, + create_hdmi_network: HDMINetworkCreator, count: int, call_count: int, - direction, - key, + direction: str, + key: int, ) -> None: """Test the volume service call with steps.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -320,7 +331,10 @@ async def test_service_volume_x_times( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_press( - hass: HomeAssistant, create_hdmi_network, direction, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + direction: str, + key: int, ) -> None: """Test the volume service call with press attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -341,7 +355,10 @@ async def test_service_volume_press( @pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)]) async def test_service_volume_release( - hass: HomeAssistant, create_hdmi_network, direction, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + direction: str, + key: int, ) -> None: """Test the volume service call with release attribute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -376,7 +393,7 @@ 
async def test_service_volume_release( ], ) async def test_service_volume_mute( - hass: HomeAssistant, create_hdmi_network, attr, key + hass: HomeAssistant, create_hdmi_network: HDMINetworkCreator, attr: str, key: int ) -> None: """Test the volume service call with mute.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -447,7 +464,10 @@ async def test_service_volume_mute( ], ) async def test_service_send_command( - hass: HomeAssistant, create_hdmi_network, data, expected + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + data: dict[str, Any], + expected: str, ) -> None: """Test the send command service call.""" mock_hdmi_network_instance = await create_hdmi_network() @@ -470,10 +490,10 @@ async def test_service_send_command( ) async def test_watchdog( hass: HomeAssistant, - create_hdmi_network, - mock_cec_adapter, - adapter_initialized_value, - watchdog_actions, + create_hdmi_network: HDMINetworkCreator, + mock_cec_adapter: MagicMock, + adapter_initialized_value: bool, + watchdog_actions: int, ) -> None: """Test the watchdog when adapter is down/up.""" adapter_initialized = PropertyMock(return_value=adapter_initialized_value) diff --git a/tests/components/hdmi_cec/test_media_player.py b/tests/components/hdmi_cec/test_media_player.py index 988279a235f..f193651c305 100644 --- a/tests/components/hdmi_cec/test_media_player.py +++ b/tests/components/hdmi_cec/test_media_player.py @@ -1,6 +1,7 @@ """Tests for the HDMI-CEC media player platform.""" from collections.abc import Callable +from typing import Any from pycec.const import ( DEVICE_TYPE_NAMES, @@ -55,6 +56,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from . import MockHDMIDevice, assert_key_press_release +from .conftest import CecEntityCreator, HDMINetworkCreator type AssertState = Callable[[str, str], None] @@ -91,7 +93,9 @@ def assert_state_fixture(request: pytest.FixtureRequest) -> AssertState: async def test_load_platform( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that media_player entity is loaded.""" hdmi_network = await create_hdmi_network(config={"platform": "media_player"}) @@ -107,7 +111,10 @@ async def test_load_platform( @pytest.mark.parametrize("platform", [{}, {"platform": "switch"}]) async def test_load_types( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, platform + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + platform: dict[str, Any], ) -> None: """Test that media_player entity is loaded when types is set.""" config = platform | {"types": {"hdmi_cec.hdmi_4": "media_player"}} @@ -133,8 +140,8 @@ async def test_load_types( async def test_service_on( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test that media_player triggers on `on` service.""" @@ -160,8 +167,8 @@ async def test_service_on( async def test_service_off( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test that media_player triggers on `off` service.""" @@ -260,10 +267,10 @@ async def test_service_off( ) async def test_supported_features( hass: HomeAssistant, - create_hdmi_network, - 
create_cec_entity, - type_id, - expected_features, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + type_id: int, + expected_features: MPEF, ) -> None: """Test that features load as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -289,11 +296,11 @@ async def test_supported_features( ) async def test_volume_services( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, - service, - extra_data, - key, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + service: str, + extra_data: dict[str, Any] | None, + key: int, ) -> None: """Test volume related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -324,7 +331,11 @@ async def test_volume_services( ], ) async def test_track_change_services( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, service, key + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + service: str, + key: int, ) -> None: """Test track change related commands.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -360,8 +371,8 @@ async def test_track_change_services( ) async def test_playback_services( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, service: str, key: int, @@ -390,8 +401,8 @@ async def test_playback_services( @pytest.mark.xfail(reason="PLAY feature isn't enabled") async def test_play_pause_service( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, assert_state: AssertState, ) -> None: """Test play pause service.""" @@ -452,11 +463,11 @@ async def test_play_pause_service( ) async def test_update_state( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, - type_id, - update_data, - expected_state, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + type_id: int, + update_data: dict[str, Any], + expected_state: str, ) -> None: """Test state updates work as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -502,7 +513,11 @@ async def test_update_state( ], ) async def test_starting_state( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, data, expected_state + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + data: dict[str, Any], + expected_state: str, ) -> None: """Test starting states are set as expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) @@ -516,7 +531,9 @@ async def test_starting_state( reason="The code only sets the state to unavailable, doesn't set the `_attr_available` to false." 
) async def test_unavailable_status( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network({"platform": "media_player"}) diff --git a/tests/components/hdmi_cec/test_switch.py b/tests/components/hdmi_cec/test_switch.py index d54d6cc103b..6ef6ce835ce 100644 --- a/tests/components/hdmi_cec/test_switch.py +++ b/tests/components/hdmi_cec/test_switch.py @@ -17,11 +17,15 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from . import MockHDMIDevice +from .conftest import CecEntityCreator, HDMINetworkCreator @pytest.mark.parametrize("config", [{}, {"platform": "switch"}]) async def test_load_platform( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, config + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + config, ) -> None: """Test that switch entity is loaded.""" hdmi_network = await create_hdmi_network(config=config) @@ -36,7 +40,9 @@ async def test_load_platform( async def test_load_types( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch entity is loaded when types is set.""" config = {"platform": "media_player", "types": {"hdmi_cec.hdmi_3": "switch"}} @@ -61,7 +67,9 @@ async def test_load_types( async def test_service_on( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch triggers on `on` service.""" hdmi_network = await create_hdmi_network() @@ -81,7 +89,9 @@ async def test_service_on( async def test_service_off( - hass: HomeAssistant, create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test that switch triggers on `off` service.""" hdmi_network = await create_hdmi_network() @@ -118,8 +128,8 @@ async def test_service_off( ) async def test_device_status_change( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, power_status, expected_state, status, @@ -154,7 +164,11 @@ async def test_device_status_change( ], ) async def test_friendly_name( - hass: HomeAssistant, create_hdmi_network, create_cec_entity, device_values, expected + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, + device_values, + expected, ) -> None: """Test friendly name setup.""" hdmi_network = await create_hdmi_network() @@ -207,8 +221,8 @@ async def test_friendly_name( ) async def test_extra_state_attributes( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, device_values, expected_attributes, ) -> None: @@ -239,8 +253,8 @@ async def test_extra_state_attributes( ) async def test_icon( hass: HomeAssistant, - create_hdmi_network, - create_cec_entity, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, device_type, expected_icon, ) -> None: @@ -254,7 +268,9 @@ async def test_icon( async def test_unavailable_status( - hass: HomeAssistant, 
create_hdmi_network, create_cec_entity + hass: HomeAssistant, + create_hdmi_network: HDMINetworkCreator, + create_cec_entity: CecEntityCreator, ) -> None: """Test entity goes into unavailable status when expected.""" hdmi_network = await create_hdmi_network() diff --git a/tests/components/here_travel_time/test_config_flow.py b/tests/components/here_travel_time/test_config_flow.py index eb958991c71..ea3de64ed0c 100644 --- a/tests/components/here_travel_time/test_config_flow.py +++ b/tests/components/here_travel_time/test_config_flow.py @@ -6,17 +6,20 @@ from here_routing import HERERoutingError, HERERoutingUnauthorizedError import pytest from homeassistant import config_entries +from homeassistant.components.here_travel_time.config_flow import DEFAULT_OPTIONS from homeassistant.components.here_travel_time.const import ( CONF_ARRIVAL_TIME, CONF_DEPARTURE_TIME, CONF_DESTINATION_ENTITY_ID, CONF_DESTINATION_LATITUDE, CONF_DESTINATION_LONGITUDE, + CONF_ORIGIN_ENTITY_ID, CONF_ORIGIN_LATITUDE, CONF_ORIGIN_LONGITUDE, CONF_ROUTE_MODE, DOMAIN, ROUTE_MODE_FASTEST, + TRAVEL_MODE_BICYCLE, TRAVEL_MODE_CAR, TRAVEL_MODE_PUBLIC, ) @@ -47,7 +50,9 @@ def bypass_setup_fixture(): @pytest.fixture(name="user_step_result") -async def user_step_result_fixture(hass: HomeAssistant) -> FlowResultType: +async def user_step_result_fixture( + hass: HomeAssistant, +) -> config_entries.ConfigFlowResult: """Provide the result of a completed user step.""" init_result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -65,7 +70,9 @@ async def user_step_result_fixture(hass: HomeAssistant) -> FlowResultType: @pytest.fixture(name="option_init_result") -async def option_init_result_fixture(hass: HomeAssistant) -> FlowResultType: +async def option_init_result_fixture( + hass: HomeAssistant, +) -> config_entries.ConfigFlowResult: """Provide the result of a completed options init step.""" entry = MockConfigEntry( domain=DOMAIN, @@ -94,8 +101,8 @@ async def option_init_result_fixture(hass: HomeAssistant) -> FlowResultType: @pytest.fixture(name="origin_step_result") async def origin_step_result_fixture( - hass: HomeAssistant, user_step_result: FlowResultType -) -> FlowResultType: + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult +) -> config_entries.ConfigFlowResult: """Provide the result of a completed origin by coordinates step.""" origin_menu_result = await hass.config_entries.flow.async_configure( user_step_result["flow_id"], {"next_step_id": "origin_coordinates"} @@ -142,7 +149,7 @@ async def test_step_user(hass: HomeAssistant, menu_options) -> None: @pytest.mark.usefixtures("valid_response") async def test_step_origin_coordinates( - hass: HomeAssistant, user_step_result: FlowResultType + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -165,7 +172,7 @@ async def test_step_origin_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_origin_entity( - hass: HomeAssistant, user_step_result: FlowResultType + hass: HomeAssistant, user_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -182,7 +189,7 @@ async def test_step_origin_entity( @pytest.mark.usefixtures("valid_response") async def test_step_destination_coordinates( - hass: HomeAssistant, origin_step_result: FlowResultType + hass: 
HomeAssistant, origin_step_result: config_entries.ConfigFlowResult ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -216,7 +223,7 @@ async def test_step_destination_coordinates( @pytest.mark.usefixtures("valid_response") async def test_step_destination_entity( hass: HomeAssistant, - origin_step_result: FlowResultType, + origin_step_result: config_entries.ConfigFlowResult, ) -> None: """Test the origin coordinates step.""" menu_result = await hass.config_entries.flow.async_configure( @@ -245,6 +252,105 @@ async def test_step_destination_entity( } +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_entity(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing a destination entity.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], {"next_step_id": "destination_entity"} + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"destination_entity_id": "zone.home"}, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + CONF_DESTINATION_ENTITY_ID: "zone.home", + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_coordinates(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing destination coordinates.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], + {"next_step_id": "destination_coordinates"}, + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + { + "destination": { + "latitude": 43.0, + "longitude": -80.3, + "radius": 5.0, + } + }, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + CONF_DESTINATION_LATITUDE: 43.0, + CONF_DESTINATION_LONGITUDE: -80.3, + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +async def do_common_reconfiguration_steps(hass: HomeAssistant) -> None: + """Walk through common flow steps for reconfiguring.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="0123456789", + data=DEFAULT_CONFIG, + options=DEFAULT_OPTIONS, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + user_step_result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + { + 
CONF_API_KEY: API_KEY, + CONF_MODE: TRAVEL_MODE_BICYCLE, + CONF_NAME: "test", + }, + ) + await hass.async_block_till_done() + menu_result = await hass.config_entries.flow.async_configure( + user_step_result["flow_id"], {"next_step_id": "origin_entity"} + ) + return await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"origin_entity_id": "zone.home"}, + ) + + async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( @@ -322,7 +428,7 @@ async def test_options_flow(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("valid_response") async def test_options_flow_arrival_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -346,7 +452,7 @@ async def test_options_flow_arrival_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_departure_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow departure time type.""" menu_result = await hass.config_entries.options.async_configure( @@ -370,7 +476,7 @@ async def test_options_flow_departure_time_step( @pytest.mark.usefixtures("valid_response") async def test_options_flow_no_time_step( - hass: HomeAssistant, option_init_result: FlowResultType + hass: HomeAssistant, option_init_result: config_entries.ConfigFlowResult ) -> None: """Test the options flow arrival time type.""" menu_result = await hass.config_entries.options.async_configure( diff --git a/tests/components/history/conftest.py b/tests/components/history/conftest.py index 075909dfd63..dd10fccccdc 100644 --- a/tests/components/history/conftest.py +++ b/tests/components/history/conftest.py @@ -13,7 +13,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/history/test_websocket_api.py b/tests/components/history/test_websocket_api.py index e5c33d0e7af..717840c6b05 100644 --- a/tests/components/history/test_websocket_api.py +++ b/tests/components/history/test_websocket_api.py @@ -2,7 +2,7 @@ import asyncio from datetime import timedelta -from unittest.mock import patch +from unittest.mock import ANY, patch from freezegun import freeze_time import pytest @@ -10,8 +10,9 @@ import pytest from homeassistant.components import history from homeassistant.components.history import websocket_api from homeassistant.components.recorder import Recorder -from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE -from homeassistant.core import HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -2072,3 +2073,84 @@ async def test_history_stream_historical_only_with_start_time_state_past( "id": 1, "type": "event", } + + +async def test_history_stream_live_chained_events( + hass: HomeAssistant, recorder_mock: Recorder, hass_ws_client: 
WebSocketGenerator +) -> None: + """Test history stream with history with a chained event.""" + now = dt_util.utcnow() + await async_setup_component(hass, "history", {}) + + await async_wait_recording_done(hass) + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "history/stream", + "entity_ids": ["binary_sensor.is_light"], + "start_time": now.isoformat(), + "include_start_time_state": True, + "significant_changes_only": False, + "no_attributes": False, + "minimal_response": True, + } + ) + response = await client.receive_json() + assert response["success"] + assert response["id"] == 1 + assert response["type"] == "result" + + response = await client.receive_json() + + assert response == { + "event": { + "end_time": ANY, + "start_time": ANY, + "states": { + "binary_sensor.is_light": [ + { + "a": {}, + "lu": ANY, + "s": STATE_OFF, + }, + ], + }, + }, + "id": 1, + "type": "event", + } + + await async_recorder_block_till_done(hass) + + @callback + def auto_off_listener(event): + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + async_track_state_change_event(hass, ["binary_sensor.is_light"], auto_off_listener) + + hass.states.async_set("binary_sensor.is_light", STATE_ON) + + response = await client.receive_json() + assert response == { + "event": { + "states": { + "binary_sensor.is_light": [ + { + "lu": ANY, + "s": STATE_ON, + "a": {}, + }, + { + "lu": ANY, + "s": STATE_OFF, + "a": {}, + }, + ], + }, + }, + "id": 1, + "type": "event", + } diff --git a/tests/components/history_stats/conftest.py b/tests/components/history_stats/conftest.py new file mode 100644 index 00000000000..f8075179e94 --- /dev/null +++ b/tests/components/history_stats/conftest.py @@ -0,0 +1,93 @@ +"""Fixtures for the History stats integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from datetime import timedelta +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch history stats setup.""" + with patch( + "homeassistant.components.history_stats.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the History stats integration in Home Assistant.""" + start_time = dt_util.utcnow() - timedelta(minutes=60) + t0 = start_time + timedelta(minutes=20) + t1 = t0 + timedelta(minutes=10) + t2 = t1 + timedelta(minutes=10) + + def _fake_states(*args, **kwargs): + return { + "binary_sensor.test_monitored": [ + State("binary_sensor.test_monitored", "off", last_changed=start_time), + State("binary_sensor.test_monitored", "on", last_changed=t0), + State("binary_sensor.test_monitored", "off", last_changed=t1), + State("binary_sensor.test_monitored", "on", last_changed=t2), + ] + } + + config_entry = MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.recorder.history.state_changes_during_period", + _fake_states, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + await async_update_entity(hass, "sensor.test") + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/history_stats/test_config_flow.py b/tests/components/history_stats/test_config_flow.py new file mode 100644 index 00000000000..a695a06995e --- /dev/null +++ b/tests/components/history_stats/test_config_flow.py @@ -0,0 +1,195 @@ +"""Test the History stats config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.history_stats.const import ( + CONF_DURATION, + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.components.recorder import Recorder +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form( + recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } + + assert 
len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("sensor.unnamed_statistics") + assert state is not None + + +async def test_validation_options( + recorder_mock: Recorder, hass: HomeAssistant, mock_setup_entry: AsyncMock +) -> None: + """Test validation.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + CONF_DURATION: {"hours": 8, "minutes": 0, "seconds": 0, "days": 20}, + }, + ) + await hass.async_block_till_done() + + assert result["step_id"] == "options" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "only_two_keys_allowed"} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_entry_already_exist( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_monitored", + CONF_STATE: ["on"], + CONF_TYPE: "count", + }, + ) + await hass.async_block_till_done() + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + 
CONF_END: "{{ utcnow() }}", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/history_stats/test_init.py b/tests/components/history_stats/test_init.py new file mode 100644 index 00000000000..4cd999ba31c --- /dev/null +++ b/tests/components/history_stats/test_init.py @@ -0,0 +1,118 @@ +"""Test History stats component setup process.""" + +from __future__ import annotations + +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN as HISTORY_STATS_DOMAIN, +) +from homeassistant.components.recorder import Recorder +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ENTITY_ID, CONF_NAME, CONF_STATE, CONF_TYPE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_unload_entry( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_device_cleaning( + recorder_mock: Recorder, + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the cleaning of devices linked to the helper History stats.""" + + # Source entity device config entry + source_config_entry = MockConfigEntry() + source_config_entry.add_to_hass(hass) + + # Device entry of the source entity + source_device1_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("binary_sensor", "identifier_test1")}, + connections={("mac", "30:31:32:33:34:01")}, + ) + + # Source entity registry + source_entity = entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device1_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + # Configure the configuration entry for History stats + history_stats_config_entry = MockConfigEntry( + data={}, + domain=HISTORY_STATS_DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # Device entry incorrectly linked to History stats config entry + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", "identifier_test2")}, + connections={("mac", "30:31:32:33:34:02")}, + ) + device_registry.async_get_or_create( + config_entry_id=history_stats_config_entry.entry_id, + identifiers={("sensor", 
"identifier_test3")}, + connections={("mac", "30:31:32:33:34:03")}, + ) + await hass.async_block_till_done() + + # Before reloading the config entry, two devices are expected to be linked + devices_before_reload = device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_before_reload) == 3 + + # Config entry reload + await hass.config_entries.async_reload(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + # Confirm the link between the source entity device and the History stats sensor + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id + + # After reloading the config entry, only one linked device is expected + devices_after_reload = device_registry.devices.get_devices_for_config_entry_id( + history_stats_config_entry.entry_id + ) + assert len(devices_after_reload) == 1 + + assert devices_after_reload[0].id == source_device1_entry.id diff --git a/tests/components/history_stats/test_sensor.py b/tests/components/history_stats/test_sensor.py index c18fb2ff784..f86c04b3e5b 100644 --- a/tests/components/history_stats/test_sensor.py +++ b/tests/components/history_stats/test_sensor.py @@ -8,20 +8,33 @@ import pytest import voluptuous as vol from homeassistant import config as hass_config -from homeassistant.components.history_stats import DOMAIN +from homeassistant.components.history_stats.const import ( + CONF_END, + CONF_START, + DEFAULT_NAME, + DOMAIN, +) from homeassistant.components.history_stats.sensor import ( PLATFORM_SCHEMA as SENSOR_SCHEMA, ) from homeassistant.components.recorder import Recorder -from homeassistant.const import ATTR_DEVICE_CLASS, SERVICE_RELOAD, STATE_UNKNOWN +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + CONF_ENTITY_ID, + CONF_NAME, + CONF_STATE, + CONF_TYPE, + SERVICE_RELOAD, + STATE_UNKNOWN, +) import homeassistant.core as ha from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, get_fixture_path +from tests.common import MockConfigEntry, async_fire_time_changed, get_fixture_path from tests.components.recorder.common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator @@ -48,6 +61,15 @@ async def test_setup(recorder_mock: Recorder, hass: HomeAssistant) -> None: assert state.state == "0.0" +async def test_setup_config_entry( + recorder_mock: Recorder, hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test the history statistics sensor setup from a config entry.""" + + state = hass.states.get("sensor.unnamed_statistics") + assert state.state == "2" + + async def test_setup_multiple_states( recorder_mock: Recorder, hass: HomeAssistant ) -> None: @@ -1727,3 +1749,50 @@ async def test_unique_id( entity_registry.async_get("sensor.test").unique_id == "some_history_stats_unique_id" ) + + +async def test_device_id( + recorder_mock: Recorder, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test for source entity device for History stats.""" + source_config_entry = MockConfigEntry() + 
source_config_entry.add_to_hass(hass) + source_device_entry = device_registry.async_get_or_create( + config_entry_id=source_config_entry.entry_id, + identifiers={("sensor", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + source_entity = entity_registry.async_get_or_create( + "binary_sensor", + "test", + "source", + config_entry=source_config_entry, + device_id=source_device_entry.id, + ) + await hass.async_block_till_done() + assert entity_registry.async_get("binary_sensor.test_source") is not None + + history_stats_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + CONF_NAME: DEFAULT_NAME, + CONF_ENTITY_ID: "binary_sensor.test_source", + CONF_STATE: ["on"], + CONF_TYPE: "count", + CONF_START: "{{ as_timestamp(utcnow()) - 3600 }}", + CONF_END: "{{ utcnow() }}", + }, + title="History stats", + ) + history_stats_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(history_stats_config_entry.entry_id) + await hass.async_block_till_done() + + history_stats_entity = entity_registry.async_get("sensor.history_stats") + assert history_stats_entity is not None + assert history_stats_entity.device_id == source_entity.device_id diff --git a/tests/components/holiday/conftest.py b/tests/components/holiday/conftest.py index 1ac595aa1f9..005756695fe 100644 --- a/tests/components/holiday/conftest.py +++ b/tests/components/holiday/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Holiday tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/home_connect/conftest.py b/tests/components/home_connect/conftest.py index f4c19320826..c8137a044a1 100644 --- a/tests/components/home_connect/conftest.py +++ b/tests/components/home_connect/conftest.py @@ -94,7 +94,7 @@ async def bypass_throttle(hass: HomeAssistant, config_entry: MockConfigEntry): @pytest.fixture(name="bypass_throttle") -def mock_bypass_throttle(): +def mock_bypass_throttle() -> Generator[None]: """Fixture to bypass the throttle decorator in __init__.""" with patch( "homeassistant.components.home_connect.update_all_devices", @@ -122,7 +122,7 @@ async def mock_integration_setup( @pytest.fixture(name="get_appliances") -def mock_get_appliances() -> Generator[None, Any, None]: +def mock_get_appliances() -> Generator[MagicMock]: """Mock ConfigEntryAuth parent (HomeAssistantAPI) method.""" with patch( "homeassistant.components.home_connect.api.ConfigEntryAuth.get_appliances", @@ -152,15 +152,18 @@ def mock_appliance(request: pytest.FixtureRequest) -> MagicMock: @pytest.fixture(name="problematic_appliance") -def mock_problematic_appliance() -> Mock: +def mock_problematic_appliance(request: pytest.FixtureRequest) -> Mock: """Fixture to mock a problematic Appliance.""" app = "Washer" + if hasattr(request, "param") and request.param: + app = request.param + mock = Mock( - spec=HomeConnectAppliance, + autospec=HomeConnectAppliance, **MOCK_APPLIANCES_PROPERTIES.get(app), ) mock.name = app - setattr(mock, "status", {}) + type(mock).status = PropertyMock(return_value={}) mock.get_programs_active.side_effect = HomeConnectError mock.get_programs_available.side_effect = HomeConnectError mock.start_program.side_effect = HomeConnectError diff --git a/tests/components/home_connect/fixtures/programs-available.json b/tests/components/home_connect/fixtures/programs-available.json index b99ee5c6add..bba1a5d2721 100644 --- 
a/tests/components/home_connect/fixtures/programs-available.json +++ b/tests/components/home_connect/fixtures/programs-available.json @@ -26,7 +26,7 @@ ] } }, - "DishWasher": { + "Dishwasher": { "data": { "programs": [ { diff --git a/tests/components/home_connect/fixtures/settings.json b/tests/components/home_connect/fixtures/settings.json index 5dc0f0e0599..eb6a5f5ff98 100644 --- a/tests/components/home_connect/fixtures/settings.json +++ b/tests/components/home_connect/fixtures/settings.json @@ -95,5 +95,21 @@ } ] } + }, + "Washer": { + "data": { + "settings": [ + { + "key": "BSH.Common.Setting.PowerState", + "value": "BSH.Common.EnumType.PowerState.On", + "type": "BSH.Common.EnumType.PowerState" + }, + { + "key": "BSH.Common.Setting.ChildLock", + "value": false, + "type": "Boolean" + } + ] + } } } diff --git a/tests/components/home_connect/test_binary_sensor.py b/tests/components/home_connect/test_binary_sensor.py index d21aec35045..39502507439 100644 --- a/tests/components/home_connect/test_binary_sensor.py +++ b/tests/components/home_connect/test_binary_sensor.py @@ -1,7 +1,6 @@ """Tests for home_connect binary_sensor entities.""" -from collections.abc import Awaitable, Callable, Generator -from typing import Any +from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock import pytest @@ -26,9 +25,8 @@ def platforms() -> list[str]: return [Platform.BINARY_SENSOR] +@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -51,10 +49,10 @@ async def test_binary_sensors( ("", "unavailable"), ], ) +@pytest.mark.usefixtures("bypass_throttle") async def test_binary_sensors_door_states( expected: str, state: str, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 616a82edebc..02d9bcaa208 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -1,16 +1,16 @@ """Test the integration init functionality.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import Awaitable, Callable from typing import Any from unittest.mock import MagicMock, Mock +from freezegun.api import FrozenDateTimeFactory import pytest from requests import HTTPError import requests_mock from homeassistant.components.home_connect.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -117,8 +117,8 @@ SERVICE_APPLIANCE_METHOD_MAPPING = { } +@pytest.mark.usefixtures("bypass_throttle") async def test_api_setup( - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -137,9 +137,38 @@ async def test_api_setup( assert config_entry.state == ConfigEntryState.NOT_LOADED -async def test_exception_handling( - bypass_throttle: Generator[None, Any, None], +async def test_update_throttle( + appliance: Mock, + freezer: FrozenDateTimeFactory, hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + 
setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test to check Throttle functionality.""" + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + get_appliances_call_count = get_appliances.call_count + + # First re-load after 1 minute is not blocked. + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state == ConfigEntryState.NOT_LOADED + freezer.tick(60) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert get_appliances.call_count == get_appliances_call_count + 1 + + # Second re-load is blocked by Throttle. + assert await hass.config_entries.async_unload(config_entry.entry_id) + assert config_entry.state == ConfigEntryState.NOT_LOADED + freezer.tick(59) + assert await hass.config_entries.async_setup(config_entry.entry_id) + assert get_appliances.call_count == get_appliances_call_count + 1 + + +@pytest.mark.usefixtures("bypass_throttle") +async def test_exception_handling( integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, setup_credentials: None, @@ -154,8 +183,8 @@ async def test_exception_handling( @pytest.mark.parametrize("token_expiration_time", [12345]) +@pytest.mark.usefixtures("bypass_throttle") async def test_token_refresh_success( - bypass_throttle: Generator[None, Any, None], integration_setup: Callable[[], Awaitable[bool]], config_entry: MockConfigEntry, aioclient_mock: AiohttpClientMocker, @@ -192,44 +221,8 @@ async def test_token_refresh_success( ) -async def test_setup( - hass: HomeAssistant, - integration_setup: Callable[[], Awaitable[bool]], - config_entry: MockConfigEntry, - setup_credentials: None, -) -> None: - """Test setting up the integration.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - - assert await hass.config_entries.async_unload(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state == ConfigEntryState.NOT_LOADED - - -async def test_update_throttle( - appliance: Mock, - hass: HomeAssistant, - config_entry: MockConfigEntry, - integration_setup: Callable[[], Awaitable[bool]], - setup_credentials: None, - platforms: list[Platform], - get_appliances: MagicMock, -) -> None: - """Test to check Throttle functionality.""" - assert config_entry.state == ConfigEntryState.NOT_LOADED - - assert await integration_setup() - assert config_entry.state == ConfigEntryState.LOADED - assert get_appliances.call_count == 0 - - +@pytest.mark.usefixtures("bypass_throttle") async def test_http_error( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -247,9 +240,9 @@ async def test_http_error( "service_call", SERVICE_KV_CALL_PARAMS + SERVICE_COMMAND_CALL_PARAMS + SERVICE_PROGRAM_CALL_PARAMS, ) +@pytest.mark.usefixtures("bypass_throttle") async def test_services( service_call: list[dict[str, Any]], - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, @@ -279,8 +272,8 @@ async def test_services( ) +@pytest.mark.usefixtures("bypass_throttle") async def test_services_exception( - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff 
--git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py new file mode 100644 index 00000000000..8d918dc5815 --- /dev/null +++ b/tests/components/home_connect/test_light.py @@ -0,0 +1,298 @@ +"""Tests for home_connect light entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_AMBIENT_LIGHT_BRIGHTNESS, + BSH_AMBIENT_LIGHT_CUSTOM_COLOR, + BSH_AMBIENT_LIGHT_ENABLED, + COOKING_LIGHTING, + COOKING_LIGHTING_BRIGHTNESS, +) +from homeassistant.components.light import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +TEST_HC_APP = "Hood" + +SETTINGS_STATUS = { + setting.pop("key"): setting + for setting in load_json_object_fixture("home_connect/settings.json") + .get(TEST_HC_APP) + .get("data") + .get("settings") +} + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.LIGHT] + + +async def test_light( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, +) -> None: + """Test switch entities.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "service_data", "state", "appliance"), + [ + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + }, + SERVICE_TURN_ON, + {}, + STATE_ON, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + STATE_ON, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: {"value": False}, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_OFF, + {}, + STATE_OFF, + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": None, + }, + COOKING_LIGHTING_BRIGHTNESS: None, + }, + SERVICE_TURN_ON, + {}, + STATE_UNKNOWN, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + STATE_ON, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": False}, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_OFF, + {}, + STATE_OFF, + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: {"value": True}, + BSH_AMBIENT_LIGHT_CUSTOM_COLOR: {}, + }, + SERVICE_TURN_ON, + {}, + STATE_ON, + "Hood", + ), + ], + indirect=["appliance"], +) +async def test_light_functionality( + entity_id: str, + status: dict, + service: str, + service_data: dict, + state: str, + appliance: Mock, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: 
None, + get_appliances: MagicMock, +) -> None: + """Test light functionality.""" + appliance.status.update(SETTINGS_STATUS) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + service_data["entity_id"] = entity_id + await hass.services.async_call( + DOMAIN, + service, + service_data, + blocking=True, + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ( + "entity_id", + "status", + "service", + "service_data", + "mock_attr", + "attr_side_effect", + "problematic_appliance", + ), + [ + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": False, + }, + }, + SERVICE_TURN_ON, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: { + "value": True, + }, + COOKING_LIGHTING_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_light", + { + COOKING_LIGHTING: {"value": False}, + }, + SERVICE_TURN_OFF, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {}, + "set_setting", + [HomeConnectError, HomeConnectError], + "Hood", + ), + ( + "light.hood_ambientlight", + { + BSH_AMBIENT_LIGHT_ENABLED: { + "value": True, + }, + BSH_AMBIENT_LIGHT_BRIGHTNESS: {"value": 70}, + }, + SERVICE_TURN_ON, + {"brightness": 200}, + "set_setting", + [HomeConnectError, None, HomeConnectError, HomeConnectError], + "Hood", + ), + ], + indirect=["problematic_appliance"], +) +async def test_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + service_data: dict, + mock_attr: str, + attr_side_effect: list, + problematic_appliance: Mock, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + get_appliances: MagicMock, +) -> None: + """Test light exception handling.""" + problematic_appliance.status.update(SETTINGS_STATUS) + problematic_appliance.set_setting.side_effect = attr_side_effect + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + # Assert that an exception is called. 
+ with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + service_data["entity_id"] = entity_id + await hass.services.async_call(DOMAIN, service, service_data, blocking=True) + assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_sensor.py b/tests/components/home_connect/test_sensor.py index f30f017d6d3..661ac62403f 100644 --- a/tests/components/home_connect/test_sensor.py +++ b/tests/components/home_connect/test_sensor.py @@ -1,7 +1,6 @@ """Tests for home_connect sensor entities.""" -from collections.abc import Awaitable, Callable, Generator -from typing import Any +from collections.abc import Awaitable, Callable from unittest.mock import MagicMock, Mock from freezegun.api import FrozenDateTimeFactory @@ -69,9 +68,8 @@ def platforms() -> list[str]: return [Platform.SENSOR] +@pytest.mark.usefixtures("bypass_throttle") async def test_sensors( - bypass_throttle: Generator[None, Any, None], - hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], setup_credentials: None, @@ -131,12 +129,12 @@ ENTITY_ID_STATES = { ) ), ) +@pytest.mark.usefixtures("bypass_throttle") async def test_event_sensors( appliance: Mock, states: tuple, event_run: dict, freezer: FrozenDateTimeFactory, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], @@ -180,10 +178,10 @@ ENTITY_ID_EDGE_CASE_STATES = [ @pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True) +@pytest.mark.usefixtures("bypass_throttle") async def test_remaining_prog_time_edge_cases( appliance: Mock, freezer: FrozenDateTimeFactory, - bypass_throttle: Generator[None, Any, None], hass: HomeAssistant, config_entry: MockConfigEntry, integration_setup: Callable[[], Awaitable[bool]], diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py new file mode 100644 index 00000000000..c6a7b384036 --- /dev/null +++ b/tests/components/home_connect/test_switch.py @@ -0,0 +1,216 @@ +"""Tests for home_connect sensor entities.""" + +from collections.abc import Awaitable, Callable, Generator +from unittest.mock import MagicMock, Mock + +from homeconnect.api import HomeConnectError +import pytest + +from homeassistant.components.home_connect.const import ( + BSH_ACTIVE_PROGRAM, + BSH_CHILD_LOCK_STATE, + BSH_OPERATION_STATE, + BSH_POWER_OFF, + BSH_POWER_ON, + BSH_POWER_STATE, +) +from homeassistant.components.switch import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant + +from .conftest import get_all_appliances + +from tests.common import MockConfigEntry, load_json_object_fixture + +SETTINGS_STATUS = { + setting.pop("key"): setting + for setting in load_json_object_fixture("home_connect/settings.json") + .get("Washer") + .get("data") + .get("settings") +} + +PROGRAM = "LaundryCare.Dryer.Program.Mix" + + +@pytest.fixture +def platforms() -> list[str]: + """Fixture to specify platforms to test.""" + return [Platform.SWITCH] + + +async def test_switches( + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + get_appliances: Mock, 
+) -> None: + """Test switch entities.""" + get_appliances.side_effect = get_all_appliances + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "state"), + [ + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": ""}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": BSH_POWER_ON}}, + SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": BSH_POWER_OFF}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_power", + { + BSH_POWER_STATE: {"value": ""}, + BSH_OPERATION_STATE: { + "value": "BSH.Common.EnumType.OperationState.Inactive" + }, + }, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": True}}, + SERVICE_TURN_ON, + STATE_ON, + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": False}}, + SERVICE_TURN_OFF, + STATE_OFF, + ), + ], +) +async def test_switch_functionality( + entity_id: str, + status: dict, + service: str, + state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch functionality.""" + appliance.status.update(SETTINGS_STATUS) + appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "mock_attr"), + [ + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + SERVICE_TURN_ON, + "start_program", + ), + ( + "switch.washer_program_mix", + {BSH_ACTIVE_PROGRAM: {"value": PROGRAM}}, + SERVICE_TURN_OFF, + "stop_program", + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + ), + ( + "switch.washer_power", + {BSH_POWER_STATE: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + ), + ( + "switch.washer_childlock", + {BSH_CHILD_LOCK_STATE: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + ), + ], +) +async def test_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + mock_attr: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test exception handling.""" + problematic_appliance.get_programs_available.side_effect = None + problematic_appliance.get_programs_available.return_value = [PROGRAM] + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == 
ConfigEntryState.LOADED + + # Assert that an exception is raised. + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {"entity_id": entity_id}, blocking=True + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homeassistant/test_repairs.py b/tests/components/homeassistant/test_repairs.py new file mode 100644 index 00000000000..c7a1b3e762e --- /dev/null +++ b/tests/components/homeassistant/test_repairs.py @@ -0,0 +1,160 @@ +"""Test the Homeassistant repairs module.""" + +from http import HTTPStatus + +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator, WebSocketGenerator + + +async def test_integration_not_found_confirm_step( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the integration_not_found issue confirm step.""" + assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) + await hass.async_block_till_done() + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + assert await async_setup_component(hass, "test1", {}) is False + await hass.async_block_till_done() + entry1 = MockConfigEntry(domain="test1") + entry1.add_to_hass(hass) + entry2 = MockConfigEntry(domain="test1") + entry2.add_to_hass(hass) + issue_id = "integration_not_found.test1" + + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + http_client = await hass_client() + + # Assert the issue is present + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + issue = msg["result"]["issues"][0] + assert issue["issue_id"] == issue_id + assert issue["translation_placeholders"] == {"domain": "test1"} + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "init" + assert data["description_placeholders"] == {"domain": "test1"} + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + + # Show menu + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "menu" + + # Apply fix + resp = await http_client.post(url, json={"next_step_id": "confirm"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" + + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(entry1.entry_id) is None + assert hass.config_entries.async_get_entry(entry2.entry_id) is None + + # Assert the issue is resolved + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert 
len(msg["result"]["issues"]) == 0 + + +async def test_integration_not_found_ignore_step( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the integration_not_found issue ignore step.""" + assert await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) + await hass.async_block_till_done() + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + await hass.async_block_till_done() + assert await async_setup_component(hass, "test1", {}) is False + await hass.async_block_till_done() + entry1 = MockConfigEntry(domain="test1") + entry1.add_to_hass(hass) + issue_id = "integration_not_found.test1" + + await async_process_repairs_platforms(hass) + ws_client = await hass_ws_client(hass) + http_client = await hass_client() + + # Assert the issue is present + await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + issue = msg["result"]["issues"][0] + assert issue["issue_id"] == issue_id + assert issue["translation_placeholders"] == {"domain": "test1"} + + url = RepairsFlowIndexView.url + resp = await http_client.post( + url, json={"handler": HOMEASSISTANT_DOMAIN, "issue_id": issue_id} + ) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data["step_id"] == "init" + assert data["description_placeholders"] == {"domain": "test1"} + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + + # Show menu + resp = await http_client.post(url) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "menu" + + # Apply fix + resp = await http_client.post(url, json={"next_step_id": "ignore"}) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "abort" + assert data["reason"] == "issue_ignored" + + await hass.async_block_till_done() + + assert hass.config_entries.async_get_entry(entry1.entry_id) + + # Assert the issue is ignored but still present + await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) + msg = await ws_client.receive_json() + assert msg["success"] + assert len(msg["result"]["issues"]) == 1 + assert msg["result"]["issues"][0].get("dismissed_version") is not None diff --git a/tests/components/homeassistant/triggers/test_event.py b/tests/components/homeassistant/triggers/test_event.py index b7bf8e5e7f3..293a9007175 100644 --- a/tests/components/homeassistant/triggers/test_event.py +++ b/tests/components/homeassistant/triggers/test_event.py @@ -7,28 +7,24 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_mock_service, mock_component +from tests.common import mock_component @pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -def context_with_user(): +def context_with_user() -> Context: """Create a context with default user_id.""" return Context(user_id="test_user_id") @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") -async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def 
test_if_fires_on_event( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the firing of events.""" context = Context() @@ -48,8 +44,8 @@ async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -57,15 +53,16 @@ async def test_if_fires_on_event(hass: HomeAssistant, calls: list[ServiceCall]) {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 async def test_if_fires_on_templated_event( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -84,8 +81,8 @@ async def test_if_fires_on_templated_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id await hass.services.async_call( automation.DOMAIN, @@ -93,14 +90,15 @@ async def test_if_fires_on_templated_event( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_multiple_events( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events.""" context = Context() @@ -123,13 +121,13 @@ async def test_if_fires_on_multiple_events( await hass.async_block_till_done() hass.bus.async_fire("test2_event", context=context) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].context.parent_id == context.id - assert calls[1].context.parent_id == context.id + assert len(service_calls) == 2 + assert service_calls[0].context.parent_id == context.id + assert service_calls[1].context.parent_id == context.id async def test_if_fires_on_event_extra_data( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events still matches with event data and context.""" assert await async_setup_component( @@ -146,7 +144,7 @@ async def test_if_fires_on_event_extra_data( "test_event", {"extra_key": "extra_data"}, context=context_with_user ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.services.async_call( automation.DOMAIN, @@ -154,14 +152,15 @@ async def test_if_fires_on_event_extra_data( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_event_with_data_and_context( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], 
context_with_user: Context ) -> None: """Test the firing of events with data and context.""" assert await async_setup_component( @@ -189,7 +188,7 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "test_event", @@ -197,18 +196,18 @@ async def test_if_fires_on_event_with_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 # No new call + assert len(service_calls) == 1 # No new call hass.bus.async_fire( "test_event", {"some_attr": "some_value", "another": "value", "second_attr": "second_value"}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_templated_data_and_context( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with templated data and context.""" assert await async_setup_component( @@ -241,7 +240,7 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "test_event", @@ -249,18 +248,18 @@ async def test_if_fires_on_event_with_templated_data_and_context( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 # No new call + assert len(service_calls) == 1 # No new call hass.bus.async_fire( "test_event", {"attr_1": "milk", "another": "value", "attr_2": "beer"}, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_empty_data_and_context_config( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of events with empty data and context config. @@ -289,11 +288,11 @@ async def test_if_fires_on_event_with_empty_data_and_context_config( context=context_with_user, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_nested_data( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with nested data. @@ -319,11 +318,11 @@ async def test_if_fires_on_event_with_nested_data( "test_event", {"parent_attr": {"some_attr": "some_value", "another": "value"}} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_event_with_empty_data( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with empty data. @@ -345,11 +344,11 @@ async def test_if_fires_on_event_with_empty_data( ) hass.bus.async_fire("test_event", {"any_attr": {}}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_sample_zha_event( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing of events with a sample zha event. 
@@ -390,7 +389,7 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.bus.async_fire( "zha_event", @@ -404,11 +403,11 @@ async def test_if_fires_on_sample_zha_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_if_event_data_not_matches( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test firing of event if no data match.""" assert await async_setup_component( @@ -428,11 +427,11 @@ async def test_if_not_fires_if_event_data_not_matches( hass.bus.async_fire("test_event", {"some_attr": "some_other_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_event_context_not_matches( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test firing of event if no context match.""" assert await async_setup_component( @@ -452,11 +451,11 @@ async def test_if_not_fires_if_event_context_not_matches( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_multiple_user_ids( - hass: HomeAssistant, calls: list[ServiceCall], context_with_user: Context + hass: HomeAssistant, service_calls: list[ServiceCall], context_with_user: Context ) -> None: """Test the firing of event when the trigger has multiple user ids. @@ -481,11 +480,11 @@ async def test_if_fires_on_multiple_user_ids( hass.bus.async_fire("test_event", {}, context=context_with_user) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_event_data_with_list( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the (non)firing of event when the data schema has lists.""" assert await async_setup_component( @@ -506,17 +505,17 @@ async def test_event_data_with_list( hass.bus.async_fire("test_event", {"some_attr": [1, 2]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # don't match a single value hass.bus.async_fire("test_event", {"some_attr": 1}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # don't match a containing list hass.bus.async_fire("test_event", {"some_attr": [1, 2, 3]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -524,7 +523,7 @@ async def test_event_data_with_list( ) async def test_state_reported_event( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, event_type: str | list[str], ) -> None: @@ -547,7 +546,7 @@ async def test_state_reported_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( "Unnamed automation failed to setup triggers and has been disabled: Can't " "listen to state_reported in event trigger for dictionary value @ " @@ -556,7 +555,9 @@ async def test_state_reported_event( async def test_templated_state_reported_event( - hass: HomeAssistant, calls: list[ServiceCall], caplog: 
pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test triggering on state reported event.""" context = Context() @@ -578,7 +579,7 @@ async def test_templated_state_reported_event( hass.bus.async_fire("test_event", context=context) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert ( "Got error 'Can't listen to state_reported in event trigger' " "when setting up triggers for automation 0" in caplog.text diff --git a/tests/components/homeassistant/triggers/test_numeric_state.py b/tests/components/homeassistant/triggers/test_numeric_state.py index 59cd7e2a2a7..85882274fec 100644 --- a/tests/components/homeassistant/triggers/test_numeric_state.py +++ b/tests/components/homeassistant/triggers/test_numeric_state.py @@ -23,22 +23,11 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -async def setup_comp(hass): +async def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") await async_setup_component( @@ -63,7 +52,7 @@ async def setup_comp(hass): "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_removal( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with removed entity.""" hass.states.async_set("test.entity", 11) @@ -86,14 +75,14 @@ async def test_if_not_fires_on_entity_removal( # Entity disappears hass.states.async_remove("test.entity") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -120,8 +109,8 @@ async def test_if_fires_on_entity_change_below( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -132,10 +121,12 @@ async def test_if_fires_on_entity_change_below( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -144,7 +135,7 @@ async def test_if_fires_on_entity_change_below( async def 
test_if_fires_on_entity_change_below_uuid( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], below: int | str, ) -> None: """Test the firing with changed entity specified by registry entry id.""" @@ -177,8 +168,8 @@ async def test_if_fires_on_entity_change_below_uuid( # 9 is below 10 hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # Set above 12 so the automation will fire again hass.states.async_set("test.entity", 12) @@ -189,17 +180,19 @@ async def test_if_fires_on_entity_change_below_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_over_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -223,14 +216,14 @@ async def test_if_fires_on_entity_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entities_change_over_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entities.""" hass.states.async_set("test.entity_1", 11) @@ -255,17 +248,17 @@ async def test_if_fires_on_entities_change_over_to_below( # 9 is below 10 hass.states.async_set("test.entity_1", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_entity_change_below_to_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" context = Context() @@ -290,25 +283,25 @@ async def test_if_not_fires_on_entity_change_below_to_below( # 9 is below 10 so this should fire hass.states.async_set("test.entity", 9, context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id # already below so should not fire again hass.states.async_set("test.entity", 5) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # still below so should not fire again hass.states.async_set("test.entity", 3) await hass.async_block_till_done() - assert len(calls) == 1 + assert 
len(service_calls) == 1 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_below_fires_on_entity_change_to_equal( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -332,14 +325,14 @@ async def test_if_not_below_fires_on_entity_change_to_equal( # 10 is not below 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "below", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 9) @@ -363,14 +356,14 @@ async def test_if_not_fires_on_initial_entity_below( # Do not fire on first update when initial state was already below hass.states.async_set("test.entity", 8) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_not_fires_on_initial_entity_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing when starting with a match.""" hass.states.async_set("test.entity", 11) @@ -394,14 +387,14 @@ async def test_if_not_fires_on_initial_entity_above( # Do not fire on first update when initial state was already above hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( "above", [10, "input_number.value_10", "number.value_10", "sensor.value_10"] ) async def test_if_fires_on_entity_change_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 9) @@ -424,11 +417,11 @@ async def test_if_fires_on_entity_change_above( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_unavailable_at_startup( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the firing with changed entity at startup.""" assert await async_setup_component( @@ -448,12 +441,12 @@ async def test_if_fires_on_entity_unavailable_at_startup( # 11 is above 10 hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_to_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -478,12 +471,12 @@ async def 
test_if_fires_on_entity_change_below_to_above( # 11 is above 10 and 9 is below hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_above_to_above( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -508,17 +501,17 @@ async def test_if_not_fires_on_entity_change_above_to_above( # 12 is above 10 so this should fire hass.states.async_set("test.entity", 12) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # already above, should not fire again hass.states.async_set("test.entity", 15) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_if_not_above_fires_on_entity_change_to_equal( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test the firing with changed entity.""" # set initial state @@ -543,7 +536,7 @@ async def test_if_not_above_fires_on_entity_change_to_equal( # 10 is not above 10 so this should not fire again hass.states.async_set("test.entity", 10) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -556,7 +549,10 @@ async def test_if_not_above_fires_on_entity_change_to_equal( ], ) async def test_if_fires_on_entity_change_below_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -580,7 +576,7 @@ async def test_if_fires_on_entity_change_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -593,7 +589,10 @@ async def test_if_fires_on_entity_change_below_range( ], ) async def test_if_fires_on_entity_change_below_above_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" assert await async_setup_component( @@ -614,7 +613,7 @@ async def test_if_fires_on_entity_change_below_above_range( # 4 is below 5 hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -627,7 +626,10 @@ async def test_if_fires_on_entity_change_below_above_range( ], ) async def test_if_fires_on_entity_change_over_to_below_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -652,7 +654,7 @@ async def test_if_fires_on_entity_change_over_to_below_range( # 9 is below 10 hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert 
len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -665,7 +667,10 @@ async def test_if_fires_on_entity_change_over_to_below_range( ], ) async def test_if_fires_on_entity_change_over_to_below_above_range( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test the firing with changed entity.""" hass.states.async_set("test.entity", 11) @@ -690,12 +695,12 @@ async def test_if_fires_on_entity_change_over_to_below_above_range( # 4 is below 5 so it should not fire hass.states.async_set("test.entity", 4) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [100, "input_number.value_100"]) async def test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test if not fired with non matching entity.""" assert await async_setup_component( @@ -715,11 +720,13 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_and_warns_if_below_entity_unknown( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, calls: list[ServiceCall] + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + service_calls: list[ServiceCall], ) -> None: """Test if warns with unknown below entity.""" assert await async_setup_component( @@ -742,7 +749,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( hass.states.async_set("test.entity", 1) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 assert len(caplog.record_tuples) == 1 assert caplog.record_tuples[0][1] == logging.WARNING @@ -750,7 +757,7 @@ async def test_if_not_fires_and_warns_if_below_entity_unknown( @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_fires_on_entity_change_below_with_attribute( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set("test.entity", 11, {"test_attribute": 11}) @@ -773,12 +780,12 @@ async def test_if_fires_on_entity_change_below_with_attribute( # 9 is below 10 hass.states.async_set("test.entity", 9, {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_not_below_with_attribute( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes.""" assert await async_setup_component( @@ -798,12 +805,12 @@ async def test_if_not_fires_on_entity_change_not_below_with_attribute( # 11 is not below 10 hass.states.async_set("test.entity", 11, {"test_attribute": 9}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_fires_on_attribute_change_with_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], 
below: int | str ) -> None: """Test attributes change.""" hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) @@ -827,12 +834,12 @@ async def test_if_fires_on_attribute_change_with_attribute_below( # 9 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 9}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_attribute_change_with_attribute_not_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -853,12 +860,12 @@ async def test_if_not_fires_on_attribute_change_with_attribute_not_below( # 11 is not below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_with_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -879,12 +886,12 @@ async def test_if_not_fires_on_entity_change_with_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "9", {"test_attribute": 11}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_if_not_fires_on_entity_change_with_not_attribute_below( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" assert await async_setup_component( @@ -905,12 +912,12 @@ async def test_if_not_fires_on_entity_change_with_not_attribute_below( # 11 is not below 10, entity state value should not be tested hass.states.async_set("test.entity", "entity") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test attributes change.""" hass.states.async_set( @@ -937,12 +944,12 @@ async def test_fires_on_attr_change_with_attribute_below_and_multiple_attr( "test.entity", "entity", {"test_attribute": 9, "not_test_attribute": 11} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("below", [10, "input_number.value_10"]) async def test_template_list( - hass: HomeAssistant, calls: list[ServiceCall], below: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: int | str ) -> None: """Test template list.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -965,12 +972,12 @@ async def test_template_list( # 3 is below 10 hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 
@pytest.mark.parametrize("below", [10.0, "input_number.value_10"]) async def test_template_string( - hass: HomeAssistant, calls: list[ServiceCall], below: float | str + hass: HomeAssistant, service_calls: list[ServiceCall], below: float | str ) -> None: """Test template string.""" assert await async_setup_component( @@ -1004,15 +1011,15 @@ async def test_template_string( await hass.async_block_till_done() hass.states.async_set("test.entity", "test state 2", {"test_attribute": "0.9"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"numeric_state - test.entity - {below} - None - test state 1 - test state 2" ) async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if not fired changed attributes.""" assert await async_setup_component( @@ -1035,7 +1042,7 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( "test.entity", "entity", {"test_attribute": 11, "not_test_attribute": 9} ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -1048,7 +1055,10 @@ async def test_not_fires_on_attr_change_with_attr_not_below_multiple_attr( ], ) async def test_if_action( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test if action.""" entity_id = "domain.test_entity" @@ -1073,19 +1083,19 @@ async def test_if_action( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, 8) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, 9) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ -1098,7 +1108,7 @@ async def test_if_action( ], ) async def test_if_fails_setup_bad_for( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, above: int | str, below: int | str ) -> None: """Test for setup failure for bad for.""" hass.states.async_set("test.entity", 5) @@ -1124,9 +1134,7 @@ async def test_if_fails_setup_bad_for( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_above_below( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_for_without_above_below(hass: HomeAssistant) -> None: """Test for setup failures for missing above or below.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1158,7 +1166,7 @@ async def test_if_fails_setup_for_without_above_below( async def test_if_not_fires_on_entity_change_with_for( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1187,7 +1195,7 @@ async def test_if_not_fires_on_entity_change_with_for( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 
@pytest.mark.parametrize( @@ -1200,7 +1208,10 @@ async def test_if_not_fires_on_entity_change_with_for( ], ) async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for not firing on entities change with for after stop.""" hass.states.async_set("test.entity_1", 0) @@ -1232,7 +1243,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_1", 15) hass.states.async_set("test.entity_2", 15) @@ -1246,9 +1257,11 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ -1263,7 +1276,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( async def test_if_fires_on_entity_change_with_for_attribute_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1294,11 +1307,11 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( async_fire_time_changed(hass) hass.states.async_set("test.entity", 9, attributes={"mock_attr": "attr_change"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1311,7 +1324,10 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( ], ) async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on entity change with for.""" hass.states.async_set("test.entity", 0) @@ -1338,12 +1354,12 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [10, "input_number.value_10"]) async def test_wait_template_with_trigger( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test using wait template with 'trigger.entity_id'.""" hass.states.async_set("test.entity", "0") @@ -1381,8 +1397,8 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "12") hass.states.async_set("test.entity", "8") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "numeric_state - test.entity - 12" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "numeric_state - test.entity - 12" @pytest.mark.parametrize( @@ -1397,7 +1413,7 @@ async def 
test_wait_template_with_trigger( async def test_if_fires_on_entities_change_no_overlap( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1432,16 +1448,16 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1456,7 +1472,7 @@ async def test_if_fires_on_entities_change_no_overlap( async def test_if_fires_on_entities_change_overlap( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1500,18 +1516,18 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" @pytest.mark.parametrize( @@ -1524,7 +1540,10 @@ async def test_if_fires_on_entities_change_overlap( ], ) async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1549,10 +1568,10 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1565,7 +1584,10 @@ async def test_if_fires_on_change_with_for_template_1( ], ) async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1590,10 +1612,10 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, 
dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1606,7 +1628,10 @@ async def test_if_fires_on_change_with_for_template_2( ], ) async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, + service_calls: list[ServiceCall], + above: int | str, + below: int | str, ) -> None: """Test for firing on change with for template.""" hass.states.async_set("test.entity", 0) @@ -1631,14 +1656,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_on_error_with_for_template( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on error with for template.""" hass.states.async_set("test.entity", 0) @@ -1662,17 +1687,17 @@ async def test_if_not_fires_on_error_with_for_template( hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", "unavailable") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) hass.states.async_set("test.entity", 101) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 @pytest.mark.parametrize( @@ -1685,7 +1710,7 @@ async def test_if_not_fires_on_error_with_for_template( ], ) async def test_invalid_for_template( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str, below: int | str + hass: HomeAssistant, above: int | str, below: int | str ) -> None: """Test for invalid for template.""" hass.states.async_set("test.entity", 0) @@ -1726,7 +1751,7 @@ async def test_invalid_for_template( async def test_if_fires_on_entities_change_overlap_for_template( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int | str, below: int | str, ) -> None: @@ -1773,22 +1798,22 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_below_above(hass: 
HomeAssistant) -> None: @@ -1823,7 +1848,7 @@ async def test_schema_unacceptable_entities(hass: HomeAssistant) -> None: @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1847,12 +1872,12 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize("above", [3, "input_number.value_3"]) async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall], above: int | str + hass: HomeAssistant, service_calls: list[ServiceCall], above: int | str ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"test-measurement": 1}) @@ -1880,10 +1905,10 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( hass.states.async_set("test.entity", "bla", {"test-measurement": 4}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 @pytest.mark.parametrize( @@ -1893,7 +1918,7 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( async def test_variables_priority( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], above: int, below: int, ) -> None: @@ -1941,17 +1966,17 @@ async def test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" @pytest.mark.parametrize("multiplier", [1, 5]) async def test_template_variable( - hass: HomeAssistant, calls: list[ServiceCall], multiplier: int + hass: HomeAssistant, service_calls: list[ServiceCall], multiplier: int ) -> None: """Test template variable.""" hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 11]}) @@ -1976,6 +2001,6 @@ async def test_template_variable( hass.states.async_set("test.entity", "entity", {"test_attribute": [11, 15, 3]}) await hass.async_block_till_done() if multiplier * 3 < 10: - assert len(calls) == 1 + assert len(service_calls) == 1 else: - assert len(calls) == 0 + assert len(service_calls) == 0 diff --git a/tests/components/homeassistant/triggers/test_state.py b/tests/components/homeassistant/triggers/test_state.py index a40ecae7579..83157a158a6 100644 --- a/tests/components/homeassistant/triggers/test_state.py +++ b/tests/components/homeassistant/triggers/test_state.py @@ -19,29 +19,18 @@ from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component import 
homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.states.async_set("test.entity", "hello") async def test_if_fires_on_entity_change( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change.""" context = Context() @@ -74,9 +63,12 @@ async def test_if_fires_on_entity_change( hass.states.async_set("test.entity", "world", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "state - test.entity - hello - world - None - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] + == "state - test.entity - hello - world - None - 0" + ) await hass.services.async_call( automation.DOMAIN, @@ -84,13 +76,16 @@ async def test_if_fires_on_entity_change( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set("test.entity", "planet") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_uuid( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change.""" context = Context() @@ -130,9 +125,11 @@ async def test_if_fires_on_entity_change_uuid( hass.states.async_set("test.beer", "world", context=context) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "state - test.beer - hello - world - None - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == "state - test.beer - hello - world - None - 0" + ) await hass.services.async_call( automation.DOMAIN, @@ -140,13 +137,14 @@ async def test_if_fires_on_entity_change_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set("test.beer", "planet") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_from_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -167,11 +165,11 @@ async def test_if_fires_on_entity_change_with_from_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_from_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change inverse filter.""" 
assert await async_setup_component( @@ -193,15 +191,15 @@ async def test_if_fires_on_entity_change_with_not_from_filter( # Do not fire from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -222,11 +220,11 @@ async def test_if_fires_on_entity_change_with_to_filter( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -248,15 +246,15 @@ async def test_if_fires_on_entity_change_with_not_to_filter( # Do not fire to world hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls hass.states.async_set("test.entity", "universum") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_from_filter_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with filter.""" assert await async_setup_component( @@ -278,11 +276,11 @@ async def test_if_fires_on_entity_change_with_from_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_to_filter_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with to filter.""" assert await async_setup_component( @@ -304,11 +302,11 @@ async def test_if_fires_on_entity_change_with_to_filter_all( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "world", {"attribute": 5}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_attribute_change_with_to_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on attribute change.""" assert await async_setup_component( @@ -330,11 +328,11 @@ async def test_if_fires_on_attribute_change_with_to_filter( hass.states.async_set("test.entity", "world", {"test_attribute": 11}) hass.states.async_set("test.entity", "world", {"test_attribute": 12}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are a non match.""" assert await async_setup_component( @@ -356,11 +354,11 @@ async def 
test_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_not_from_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -383,31 +381,31 @@ async def test_if_fires_on_entity_change_with_not_from_to( # We should not trigger from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls # We should not trigger to != galaxy hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert not calls + assert not service_calls # We should trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should not trigger from milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_from_not_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if not from doesn't match and to match.""" assert await async_setup_component( @@ -430,31 +428,31 @@ async def test_if_fires_on_entity_change_with_from_not_to( # We should trigger to world from hello hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Reset back to hello, should not trigger hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should not trigger to galaxy hass.states.async_set("test.entity", "galaxy") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # We should trigger form galaxy to milky way hass.states.async_set("test.entity", "milky_way") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 # We should not trigger to universe hass.states.async_set("test.entity", "universe") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_if_not_fires_if_to_filter_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if to filter is not a match.""" assert await async_setup_component( @@ -476,11 +474,11 @@ async def test_if_not_fires_if_to_filter_not_match( hass.states.async_set("test.entity", "moon") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_from_filter_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if from filter is not a match.""" hass.states.async_set("test.entity", "bye") @@ -504,11 +502,11 @@ async def test_if_not_fires_if_from_filter_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert 
len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_if_entity_not_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing if entity is not matching.""" assert await async_setup_component( @@ -525,10 +523,10 @@ async def test_if_not_fires_if_entity_not_match( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action(hass: HomeAssistant, service_calls: list[ServiceCall]) -> None: """Test for to action.""" entity_id = "domain.test_entity" test_state = "new_state" @@ -551,18 +549,16 @@ async def test_if_action(hass: HomeAssistant, calls: list[ServiceCall]) -> None: hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entity_id, test_state + "something") hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_fails_setup_if_to_boolean_value( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_if_to_boolean_value(hass: HomeAssistant) -> None: """Test for setup failure for boolean to.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -582,9 +578,7 @@ async def test_if_fails_setup_if_to_boolean_value( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_if_from_boolean_value( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_if_from_boolean_value(hass: HomeAssistant) -> None: """Test for setup failure for boolean from.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -604,9 +598,7 @@ async def test_if_fails_setup_if_from_boolean_value( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_bad_for( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_bad_for(hass: HomeAssistant) -> None: """Test for setup failure for bad for.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -628,7 +620,7 @@ async def test_if_fails_setup_bad_for( async def test_if_not_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for.""" assert await async_setup_component( @@ -654,11 +646,11 @@ async def test_if_not_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" assert await async_setup_component( @@ -686,7 +678,7 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - 
assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set("test.entity_1", "world_no") hass.states.async_set("test.entity_2", "world_no") @@ -700,14 +692,17 @@ async def test_if_not_fires_on_entities_change_with_for_after_stop( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_entity_change_with_for_attribute_change( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for and attribute change.""" assert await async_setup_component( @@ -735,15 +730,17 @@ async def test_if_fires_on_entity_change_with_for_attribute_change( "test.entity", "world", attributes={"mock_attr": "attr_change"} ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for_multiple_force_update( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for and force update.""" assert await async_setup_component( @@ -770,15 +767,15 @@ async def test_if_fires_on_entity_change_with_for_multiple_force_update( async_fire_time_changed(hass) hass.states.async_set("test.force_entity", "world", None, True) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=4)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -802,11 +799,11 @@ async def test_if_fires_on_entity_change_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_entity_change_with_for_without_to( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -830,22 +827,24 @@ async def test_if_fires_on_entity_change_with_for_without_to( async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set("test.entity", "world") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=4)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def 
test_if_does_not_fires_on_entity_change_with_for_without_to_2( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entity change with for.""" assert await async_setup_component( @@ -871,11 +870,11 @@ async def test_if_does_not_fires_on_entity_change_with_for_without_to_2( async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_entity_creation_and_removal( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on entity creation and removal, with to/from constraints.""" # set automations for multiple combinations to/from @@ -917,32 +916,32 @@ async def test_if_fires_on_entity_creation_and_removal( # automation with match_all triggers on creation hass.states.async_set("test.entity_0", "any", context=context_0) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context_0.id + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context_0.id # create entities, trigger on test.entity_2 ('to' matches, no 'from') hass.states.async_set("test.entity_1", "hello", context=context_1) hass.states.async_set("test.entity_2", "world", context=context_2) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].context.parent_id == context_2.id + assert len(service_calls) == 2 + assert service_calls[1].context.parent_id == context_2.id # removal of both, trigger on test.entity_1 ('from' matches, no 'to') assert hass.states.async_remove("test.entity_1", context=context_1) assert hass.states.async_remove("test.entity_2", context=context_2) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].context.parent_id == context_1.id + assert len(service_calls) == 3 + assert service_calls[2].context.parent_id == context_1.id # automation with match_all triggers on removal assert hass.states.async_remove("test.entity_0", context=context_0) await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].context.parent_id == context_0.id + assert len(service_calls) == 4 + assert service_calls[3].context.parent_id == context_0.id async def test_if_fires_on_for_condition( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if condition is on.""" point1 = dt_util.utcnow() @@ -971,17 +970,17 @@ async def test_if_fires_on_for_condition( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future mock_utcnow.return_value = point2 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_for_condition_attribute_change( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if condition is on with attribute change.""" point1 = dt_util.utcnow() @@ -1011,7 +1010,7 @@ async def test_if_fires_on_for_condition_attribute_change( # not enough time has passed hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Still not enough 
time has passed, but an attribute is changed mock_utcnow.return_value = point2 @@ -1020,18 +1019,16 @@ async def test_if_fires_on_for_condition_attribute_change( ) hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Enough time has now passed mock_utcnow.return_value = point3 hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_fails_setup_for_without_time( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_for_without_time(hass: HomeAssistant) -> None: """Test for setup failure if no time is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1053,9 +1050,7 @@ async def test_if_fails_setup_for_without_time( assert hass.states.get("automation.automation_0").state == STATE_UNAVAILABLE -async def test_if_fails_setup_for_without_entity( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_if_fails_setup_for_without_entity(hass: HomeAssistant) -> None: """Test for setup failure if no entity is provided.""" with assert_setup_component(1, automation.DOMAIN): assert await async_setup_component( @@ -1077,7 +1072,7 @@ async def test_if_fails_setup_for_without_entity( async def test_wait_template_with_trigger( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test using wait template with 'trigger.entity_id'.""" assert await async_setup_component( @@ -1113,12 +1108,14 @@ async def test_wait_template_with_trigger( hass.states.async_set("test.entity", "world") hass.states.async_set("test.entity", "hello") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "state - test.entity - hello - world" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "state - test.entity - hello - world" async def test_if_fires_on_entities_change_no_overlap( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with no overlap.""" assert await async_setup_component( @@ -1146,20 +1143,22 @@ async def test_if_fires_on_entities_change_no_overlap( freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() freezer.tick(timedelta(seconds=10)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" async def test_if_fires_on_entities_change_overlap( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with overlap.""" assert await async_setup_component( @@ -1196,22 +1195,22 @@ async def test_if_fires_on_entities_change_overlap( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await 
hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2" async def test_if_fires_on_change_with_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1232,14 +1231,14 @@ async def test_if_fires_on_change_with_for_template_1( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1260,14 +1259,14 @@ async def test_if_fires_on_change_with_for_template_2( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_3( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1288,14 +1287,14 @@ async def test_if_fires_on_change_with_for_template_3( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_with_for_template_4( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with for template.""" assert await async_setup_component( @@ -1317,14 +1316,14 @@ async def test_if_fires_on_change_with_for_template_4( hass.states.async_set("test.entity", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_change_from_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1351,11 +1350,11 @@ async def test_if_fires_on_change_from_with_for( await hass.async_block_till_done() 
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_on_change_from_with_for( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on change with from/for.""" assert await async_setup_component( @@ -1382,12 +1381,10 @@ async def test_if_not_fires_on_change_from_with_for( await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_invalid_for_template_1( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_invalid_for_template_1(hass: HomeAssistant) -> None: """Test for invalid for template.""" assert await async_setup_component( hass, @@ -1412,7 +1409,9 @@ async def test_invalid_for_template_1( async def test_if_fires_on_entities_change_overlap_for_template( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing on entities change with overlap and for template.""" assert await async_setup_component( @@ -1452,26 +1451,26 @@ async def test_if_fires_on_entities_change_overlap_for_template( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" async def test_attribute_if_fires_on_entity_change_with_both_filters( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1496,11 +1495,11 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters( hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1524,21 +1523,21 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant( # Leave all attributes the same hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Change the untracked 
attribute hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "new_value"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Change the tracked attribute hass.states.async_set("test.entity", "bla", {"name": "world", "other": "old_value"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "other_name"}) @@ -1565,25 +1564,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_filter( "test.entity", "bla", {"name": "best_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "best_name", "other": "new_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "other_name", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if attribute stays the same.""" hass.states.async_set("test.entity", "bla", {"name": "hello", "other": "old_value"}) @@ -1610,25 +1609,25 @@ async def test_attribute_if_fires_on_entity_where_attr_stays_constant_all( "test.entity", "bla", {"name": "name_1", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the untracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_1", "other": "new_value"} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Change the tracked attribute hass.states.async_set( "test.entity", "bla", {"name": "name_2", "other": "old_value"} ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on entity change with for after stop trigger.""" hass.states.async_set("test.entity", "bla", {"name": "hello"}) @@ -1658,33 +1657,33 @@ async def test_attribute_if_not_fires_on_entities_change_with_for_after_stop( # Test that the for-check works hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=2)) hass.states.async_set("test.entity", "bla", {"name": "world", "something": "else"}) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Now remove state while inside "for" hass.states.async_set("test.entity", "bla", {"name": "hello"}) 
hass.states.async_set("test.entity", "bla", {"name": "world"}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_remove("test.entity") await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing if both filters are match attribute.""" hass.states.async_set("test.entity", "bla", {"happening": False}) @@ -1709,11 +1708,13 @@ async def test_attribute_if_fires_on_entity_change_with_both_filters_boolean( hass.states.async_set("test.entity", "bla", {"happening": True}) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_variables_priority( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test an externally defined trigger variable is overridden.""" assert await async_setup_component( @@ -1754,19 +1755,19 @@ async def test_variables_priority( async_fire_time_changed(hass) hass.states.async_set("test.entity_2", "world") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test.entity_1 - 0:00:05" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test.entity_1 - 0:00:05" freezer.tick(timedelta(seconds=3)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 freezer.tick(timedelta(seconds=5)) async_fire_time_changed(hass) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "test.entity_2 - 0:00:10" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "test.entity_2 - 0:00:10" diff --git a/tests/components/homeassistant/triggers/test_time.py b/tests/components/homeassistant/triggers/test_time.py index 961bac6c367..76d80120fdd 100644 --- a/tests/components/homeassistant/triggers/test_time.py +++ b/tests/components/homeassistant/triggers/test_time.py @@ -20,28 +20,19 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import ( - assert_setup_component, - async_fire_time_changed, - async_mock_service, - mock_component, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import assert_setup_component, async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") async def test_if_fires_using_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at.""" now = dt_util.now() @@ -71,9 +62,9 @@ async def 
test_if_fires_using_at( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "time - 5" - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "time - 5" + assert service_calls[0].data["id"] == 0 @pytest.mark.parametrize( @@ -82,7 +73,7 @@ async def test_if_fires_using_at( async def test_if_fires_using_at_input_datetime( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - calls: list[ServiceCall], + service_calls: list[ServiceCall], has_date, has_time, ) -> None: @@ -132,9 +123,9 @@ async def test_if_fires_using_at_input_datetime( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) @@ -152,20 +143,23 @@ async def test_if_fires_using_at_input_datetime( }, blocking=True, ) + assert len(service_calls) == 3 await hass.async_block_till_done() async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 4 assert ( - calls[1].data["some"] + service_calls[3].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-input_datetime.trigger" ) async def test_if_fires_using_multiple_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at.""" @@ -195,18 +189,20 @@ async def test_if_fires_using_multiple_at( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "time - 5" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "time - 5" async_fire_time_changed(hass, trigger_dt + timedelta(hours=1, seconds=1)) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "time - 6" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "time - 6" async def test_if_not_fires_using_wrong_at( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """YAML translates time values to total seconds. 
@@ -242,10 +238,12 @@ async def test_if_not_fires_using_wrong_at( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_if_action_before(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action_before( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for if action before.""" assert await async_setup_component( hass, @@ -267,16 +265,18 @@ async def test_if_action_before(hass: HomeAssistant, calls: list[ServiceCall]) - hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 -async def test_if_action_after(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_if_action_after( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for if action after.""" assert await async_setup_component( hass, @@ -298,17 +298,17 @@ async def test_if_action_after(hass: HomeAssistant, calls: list[ServiceCall]) -> hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 with patch("homeassistant.helpers.condition.dt_util.now", return_value=after_10): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_action_one_weekday( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for if action with one weekday.""" assert await async_setup_component( @@ -332,17 +332,17 @@ async def test_if_action_one_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_action_list_weekday( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for action with a list of weekdays.""" assert await async_setup_component( @@ -367,19 +367,19 @@ async def test_if_action_list_weekday( hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 with patch("homeassistant.helpers.condition.dt_util.now", return_value=tuesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 with patch("homeassistant.helpers.condition.dt_util.now", return_value=wednesday): hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_untrack_time_change(hass: HomeAssistant) -> None: @@ -416,7 +416,9 @@ async def test_untrack_time_change(hass: HomeAssistant) -> None: async def test_if_fires_using_at_sensor( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at sensor time.""" now = dt_util.now() @@ -452,9 +454,9 @@ async def test_if_fires_using_at_sensor( 
async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -470,9 +472,9 @@ async def test_if_fires_using_at_sensor( async_fire_time_changed(hass, trigger_dt + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"time-{trigger_dt.day}-{trigger_dt.hour}-sensor.next_alarm" ) @@ -494,7 +496,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(calls) == 2 + assert len(service_calls) == 2 # Now without device class hass.states.async_set( @@ -513,7 +515,7 @@ async def test_if_fires_using_at_sensor( await hass.async_block_till_done() # We should not have listened to anything - assert len(calls) == 2 + assert len(service_calls) == 2 @pytest.mark.parametrize( @@ -544,7 +546,7 @@ def test_schema_invalid(conf) -> None: async def test_datetime_in_past_on_load( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test time trigger works if input_datetime is in past.""" await async_setup_component( @@ -566,6 +568,7 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) + assert len(service_calls) == 1 await hass.async_block_till_done() assert await async_setup_component( @@ -587,7 +590,7 @@ async def test_datetime_in_past_on_load( async_fire_time_changed(hass, now) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 await hass.services.async_call( "input_datetime", @@ -598,13 +601,14 @@ async def test_datetime_in_past_on_load( }, blocking=True, ) + assert len(service_calls) == 2 await hass.async_block_till_done() async_fire_time_changed(hass, future + timedelta(seconds=1)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 3 assert ( - calls[0].data["some"] + service_calls[2].data["some"] == f"time-{future.day}-{future.hour}-input_datetime.my_trigger" ) diff --git a/tests/components/homeassistant/triggers/test_time_pattern.py b/tests/components/homeassistant/triggers/test_time_pattern.py index 327623d373b..7138fd7dd02 100644 --- a/tests/components/homeassistant/triggers/test_time_pattern.py +++ b/tests/components/homeassistant/triggers/test_time_pattern.py @@ -13,23 +13,19 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, async_mock_service, mock_component - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_fire_time_changed, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") async def test_if_fires_when_hour_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if hour is matching.""" now = dt_util.utcnow() @@ -58,7 +54,8 @@ async def 
test_if_fires_when_hour_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, hour=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 await hass.services.async_call( automation.DOMAIN, @@ -66,15 +63,17 @@ async def test_if_fires_when_hour_matches( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, now.replace(year=now.year + 1, day=1, hour=0)) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 async def test_if_fires_when_minute_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if minutes are matching.""" now = dt_util.utcnow() @@ -101,11 +100,13 @@ async def test_if_fires_when_minute_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, minute=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_second_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -132,11 +133,13 @@ async def test_if_fires_when_second_matches( async_fire_time_changed(hass, now.replace(year=now.year + 2, day=1, second=0)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_second_as_string_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if seconds are matching.""" now = dt_util.utcnow() @@ -165,11 +168,13 @@ async def test_if_fires_when_second_as_string_matches( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_when_all_matches( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing if everything matches.""" now = dt_util.utcnow() @@ -198,11 +203,13 @@ async def test_if_fires_when_all_matches( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_periodic_seconds( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing periodically every second.""" now = dt_util.utcnow() @@ -231,11 +238,13 @@ async def test_if_fires_periodic_seconds( ) await hass.async_block_till_done() - assert len(calls) >= 1 + assert len(service_calls) >= 1 async def test_if_fires_periodic_minutes( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing periodically every minute.""" @@ -265,11 +274,13 @@ async def test_if_fires_periodic_minutes( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 
async def test_if_fires_periodic_hours( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing periodically every hour.""" now = dt_util.utcnow() @@ -298,11 +309,13 @@ async def test_if_fires_periodic_hours( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_default_values( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, calls: list[ServiceCall] + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + service_calls: list[ServiceCall], ) -> None: """Test for firing at 2 minutes every hour.""" now = dt_util.utcnow() @@ -326,24 +339,24 @@ async def test_default_values( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=1, minute=2, second=1) ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed( hass, now.replace(year=now.year + 2, day=1, hour=2, minute=2, second=0) ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 -async def test_invalid_schemas(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_invalid_schemas() -> None: """Test invalid schemas.""" schemas = ( None, diff --git a/tests/components/homeassistant_alerts/test_init.py b/tests/components/homeassistant_alerts/test_init.py index 444db019c7c..0a38778bbee 100644 --- a/tests/components/homeassistant_alerts/test_init.py +++ b/tests/components/homeassistant_alerts/test_init.py @@ -26,7 +26,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -def stub_alert(aioclient_mock, alert_id): +def stub_alert(aioclient_mock: AiohttpClientMocker, alert_id) -> None: """Stub an alert.""" aioclient_mock.get( f"https://alerts.home-assistant.io/alerts/{alert_id}.json", @@ -35,7 +35,7 @@ def stub_alert(aioclient_mock, alert_id): @pytest.fixture(autouse=True) -async def setup_repairs(hass): +async def setup_repairs(hass: HomeAssistant) -> None: """Set up the repairs integration.""" assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) @@ -99,9 +99,9 @@ async def test_alerts( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - supervisor_info, - expected_alerts, + ha_version: str, + supervisor_info: dict[str, str] | None, + expected_alerts: list[tuple[str, str]], ) -> None: """Test creating issues based on alerts.""" @@ -292,12 +292,12 @@ async def test_alerts_refreshed_on_component_load( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - supervisor_info, - initial_components, - late_components, - initial_alerts, - late_alerts, + ha_version: str, + supervisor_info: dict[str, str] | None, + initial_components: list[str], + late_components: list[str], + initial_alerts: list[tuple[str, str]], + late_alerts: list[tuple[str, str]], freezer: FrozenDateTimeFactory, ) -> None: """Test alerts are refreshed when components are loaded.""" @@ -433,9 +433,9 @@ async def test_bad_alerts( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aioclient_mock: AiohttpClientMocker, - ha_version, - fixture, - expected_alerts, + ha_version: str, + fixture: str, + expected_alerts: list[tuple[str, str]], ) -> None: """Test 
creating issues based on alerts.""" fixture_content = load_fixture(fixture, "homeassistant_alerts") diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index 72e937396ea..b62ccaf855b 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Hardware integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> None: + def mock_probe(config: dict[str, Any]) -> dict[str, Any]: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} diff --git a/tests/components/homeassistant_hardware/test_config_flow.py b/tests/components/homeassistant_hardware/test_config_flow.py new file mode 100644 index 00000000000..a1842f4c4e6 --- /dev/null +++ b/tests/components/homeassistant_hardware/test_config_flow.py @@ -0,0 +1,674 @@ +"""Test the Home Assistant hardware firmware config flow.""" + +import asyncio +from collections.abc import Awaitable, Callable, Generator, Iterator +import contextlib +from typing import Any +from unittest.mock import AsyncMock, Mock, call, patch + +import pytest +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_THREAD, + STEP_PICK_FIRMWARE_ZIGBEE, + BaseFirmwareConfigFlow, + BaseFirmwareOptionsFlow, +) +from homeassistant.components.homeassistant_hardware.util import ( + get_otbr_addon_manager, + get_zigbee_flasher_addon_manager, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult, OptionsFlow +from homeassistant.core import HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) + +TEST_DOMAIN = "test_firmware_domain" +TEST_DEVICE = "/dev/SomeDevice123" +TEST_HARDWARE_NAME = "Some Hardware Name" + + +class FakeFirmwareConfigFlow(BaseFirmwareConfigFlow, domain=TEST_DOMAIN): + """Config flow for `test_firmware_domain`.""" + + VERSION = 1 + MINOR_VERSION = 2 + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Return the options flow.""" + return FakeFirmwareOptionsFlowHandler(config_entry) + + async def async_step_hardware( + self, data: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle hardware flow.""" + self._device = TEST_DEVICE + self._hardware_name = TEST_HARDWARE_NAME + + return await self.async_step_confirm() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._device is not None + assert self._hardware_name is not None + assert self._probed_firmware_type is not None + + return self.async_create_entry( + title=self._hardware_name, + data={ + "device": self._device, + "firmware": self._probed_firmware_type.value, + "hardware": self._hardware_name, + }, + ) + + +class FakeFirmwareOptionsFlowHandler(BaseFirmwareOptionsFlow): + """Options flow for 
`test_firmware_domain`.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._device = self.config_entry.data["device"] + self._hardware_name = self.config_entry.data["hardware"] + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + "firmware": self._probed_firmware_type.value, + }, + options=self.config_entry.options, + ) + + return self.async_create_entry(title="", data={}) + + +@pytest.fixture(autouse=True) +def mock_test_firmware_platform( + hass: HomeAssistant, +) -> Generator[None]: + """Fixture for a test config flow.""" + mock_module = MockModule( + TEST_DOMAIN, async_setup_entry=AsyncMock(return_value=True) + ) + mock_integration(hass, mock_module) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow") + + with mock_config_flow(TEST_DOMAIN, FakeFirmwareConfigFlow): + yield + + +def delayed_side_effect() -> Callable[..., Awaitable[None]]: + """Slows down eager tasks by delaying for an event loop tick.""" + + async def side_effect(*args: Any, **kwargs: Any) -> None: + await asyncio.sleep(0) + + return side_effect + + +@contextlib.contextmanager +def mock_addon_info( + hass: HomeAssistant, + *, + is_hassio: bool = True, + app_type: ApplicationType = ApplicationType.EZSP, + otbr_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), + flasher_addon_info: AddonInfo = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_INSTALLED, + update_available=False, + version=None, + ), +) -> Iterator[tuple[Mock, Mock]]: + """Mock the main addon states for the config flow.""" + mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) + mock_flasher_manager.addon_name = "Silicon Labs Flasher" + mock_flasher_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info + + mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) + mock_otbr_manager.addon_name = "OpenThread Border Router" + mock_otbr_manager.async_install_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_start_addon_waiting = AsyncMock( + side_effect=delayed_side_effect() + ) + mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info + + with ( + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_otbr_addon_manager", + return_value=mock_otbr_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.get_zigbee_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.is_hassio", + return_value=is_hassio, + ), + patch( + 
"homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", + return_value=app_type, + ), + ): + yield mock_otbr_manager, mock_flasher_manager + + +async def test_config_flow_zigbee(hass: HomeAssistant) -> None: + """Test the config flow.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +async def test_config_flow_zigbee_skip_step_if_installed(hass: HomeAssistant) -> None: + """Test the config flow, skip installing the addon if necessary.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + flasher_addon_info=AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 115200, + "bootloader_baudrate": 
115200, + "flow_control": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we skip installation, instead we directly run it + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + # Uninstall the addon + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # Done + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + +async def test_config_flow_thread(hass: HomeAssistant) -> None: + """Test the config flow.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "spinel", + "device": TEST_DEVICE, + 
"hardware": TEST_HARDWARE_NAME, + } + + +async def test_config_flow_thread_addon_already_installed(hass: HomeAssistant) -> None: + """Test the Thread config flow, addon is already installed.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + with mock_addon_info( + hass, + app_type=ApplicationType.EZSP, + otbr_addon_info=AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version=None, + ), + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_config_flow_zigbee_not_hassio(hass: HomeAssistant) -> None: + """Test when the stick is used with a non-hassio setup.""" + result = await hass.config_entries.flow.async_init( + TEST_DOMAIN, context={"source": "hardware"} + ) + + with mock_addon_info( + hass, + is_hassio=False, + app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" + + +async def test_options_flow_zigbee_to_thread(hass: HomeAssistant) -> None: + """Test the options flow, migrating Zigbee to Thread.""" + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ + "firmware": "ezsp", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "ezsp" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + with mock_addon_info( + hass, + 
app_type=ApplicationType.EZSP, + ) as (mock_otbr_manager, mock_flasher_manager): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + ) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_otbr_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={ + "device": "", + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + }, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.2.3", + ) + + # Progress the flow, it is now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "start_otbr_addon" + assert result["progress_action"] == "start_otbr_addon" + + assert mock_otbr_manager.async_set_addon_options.mock_calls == [ + call( + { + "device": TEST_DEVICE, + "baudrate": 460800, + "flow_control": True, + "autoflash_firmware": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # The addon is now running + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_otbr" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "spinel" + + +async def test_options_flow_thread_to_zigbee(hass: HomeAssistant) -> None: + """Test the options flow, migrating Thread to Zigbee.""" + config_entry = MockConfigEntry( + domain=TEST_DOMAIN, + data={ + "firmware": "spinel", + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, + }, + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(config_entry.entry_id) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == TEST_HARDWARE_NAME + + with mock_addon_info( + hass, + app_type=ApplicationType.SPINEL, + ) as (mock_otbr_manager, mock_flasher_manager): + # Pick the menu option: we are now installing the addon + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now configuring the addon and running it + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "run_zigbee_flasher_addon" + assert result["progress_action"] == "run_zigbee_flasher_addon" + assert mock_flasher_manager.async_set_addon_options.mock_calls == [ + call( + { + 
"device": TEST_DEVICE, + "baudrate": 115200, + "bootloader_baudrate": 115200, + "flow_control": True, + } + ) + ] + + await hass.async_block_till_done(wait_background_tasks=True) + + # Progress the flow, we are now uninstalling the addon + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["step_id"] == "uninstall_zigbee_flasher_addon" + assert result["progress_action"] == "uninstall_zigbee_flasher_addon" + + await hass.async_block_till_done(wait_background_tasks=True) + + # We are finally done with the addon + assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] + + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_zigbee" + + # We are now done + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # The firmware type has been updated + assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py b/tests/components/homeassistant_hardware/test_config_flow_failures.py similarity index 73% rename from tests/components/homeassistant_sky_connect/test_config_flow_failures.py rename to tests/components/homeassistant_hardware/test_config_flow_failures.py index b29f8d808ae..4c3ea7d28fa 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow_failures.py +++ b/tests/components/homeassistant_hardware/test_config_flow_failures.py @@ -1,38 +1,43 @@ -"""Test the Home Assistant SkyConnect config flow failure cases.""" +"""Test the Home Assistant hardware firmware config flow failure cases.""" from unittest.mock import AsyncMock import pytest from universal_silabs_flasher.const import ApplicationType -from homeassistant.components import usb from homeassistant.components.hassio.addon_manager import ( AddonError, AddonInfo, AddonState, ) -from homeassistant.components.homeassistant_sky_connect.config_flow import ( +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( STEP_PICK_FIRMWARE_THREAD, STEP_PICK_FIRMWARE_ZIGBEE, ) -from homeassistant.components.homeassistant_sky_connect.const import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .test_config_flow import USB_DATA_ZBT1, delayed_side_effect, mock_addon_info +from .test_config_flow import ( + TEST_DEVICE, + TEST_DOMAIN, + TEST_HARDWARE_NAME, + delayed_side_effect, + mock_addon_info, + mock_test_firmware_platform, # noqa: F401 +) from tests.common import MockConfigEntry @pytest.mark.parametrize( - ("usb_data", "model", "next_step"), + "next_step", [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_ZIGBEE), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1", STEP_PICK_FIRMWARE_THREAD), + STEP_PICK_FIRMWARE_ZIGBEE, + STEP_PICK_FIRMWARE_THREAD, ], ) async def test_config_flow_cannot_probe_firmware( - usb_data: usb.UsbServiceInfo, model: str, next_step: str, hass: HomeAssistant + next_step: str, hass: HomeAssistant ) -> None: """Test failure case when firmware cannot be probed.""" @@ -42,7 +47,7 @@ async def test_config_flow_cannot_probe_firmware( ) as (mock_otbr_manager, mock_flasher_manager): # Start the flow result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + 
TEST_DOMAIN, context={"source": "hardware"} ) result = await hass.config_entries.flow.async_configure( @@ -54,18 +59,12 @@ async def test_config_flow_cannot_probe_firmware( assert result["reason"] == "unsupported_firmware" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_not_hassio_wrong_firmware( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test when the stick is used with a non-hassio setup but the firmware is bad.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -85,18 +84,12 @@ async def test_config_flow_zigbee_not_hassio_wrong_firmware( assert result["reason"] == "not_hassio" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_already_running( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon is already running.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -125,18 +118,10 @@ async def test_config_flow_zigbee_flasher_addon_already_running( assert result["reason"] == "addon_already_running" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_addon_info_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -166,18 +151,12 @@ async def test_config_flow_zigbee_flasher_addon_info_fails( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_install_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -202,18 +181,12 @@ async def test_config_flow_zigbee_flasher_addon_install_fails( assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_config_flow_zigbee_flasher_addon_set_config_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -242,18 +215,10 @@ async def test_config_flow_zigbee_flasher_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" 
-@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_run_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -279,18 +244,10 @@ async def test_config_flow_zigbee_flasher_run_fails( assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_flasher_uninstall_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_zigbee_flasher_uninstall_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -319,18 +276,10 @@ async def test_config_flow_zigbee_flasher_uninstall_fails( assert result["step_id"] == "confirm_zigbee" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_not_hassio( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_not_hassio(hass: HomeAssistant) -> None: """Test when the stick is used with a non-hassio setup and Thread is selected.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -350,18 +299,10 @@ async def test_config_flow_thread_not_hassio( assert result["reason"] == "not_hassio_thread" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_info_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_info_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -382,18 +323,10 @@ async def test_config_flow_thread_addon_info_fails( assert result["reason"] == "addon_info_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_already_running( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_already_running(hass: HomeAssistant) -> None: """Test failure case when the Thread addon is already running.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -425,18 +358,10 @@ async def test_config_flow_thread_addon_already_running( assert result["reason"] == "otbr_addon_already_running" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant 
Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_install_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_install_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be installed.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -460,18 +385,10 @@ async def test_config_flow_thread_addon_install_fails( assert result["reason"] == "addon_install_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_set_config_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_addon_set_config_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon cannot be configured.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -495,18 +412,10 @@ async def test_config_flow_thread_addon_set_config_fails( assert result["reason"] == "addon_set_config_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_flasher_run_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_flasher_run_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon fails to run.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -531,18 +440,10 @@ async def test_config_flow_thread_flasher_run_fails( assert result["reason"] == "addon_start_failed" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_flasher_uninstall_fails( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: +async def test_config_flow_thread_flasher_uninstall_fails(hass: HomeAssistant) -> None: """Test failure case when flasher addon uninstall fails.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data + TEST_DOMAIN, context={"source": "hardware"} ) with mock_addon_info( @@ -572,27 +473,16 @@ async def test_config_flow_thread_flasher_uninstall_fails( assert result["step_id"] == "confirm_otbr" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_options_flow_zigbee_to_thread_zha_configured( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test the options flow migration failure, ZHA using the stick.""" config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", + domain=TEST_DOMAIN, data={ "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, }, version=1, minor_version=2, @@ -604,7 +494,7 @@ async def 
test_options_flow_zigbee_to_thread_zha_configured( # Set up ZHA as well zha_config_entry = MockConfigEntry( domain="zha", - data={"device": {"path": usb_data.device}}, + data={"device": {"path": TEST_DEVICE}}, ) zha_config_entry.add_to_hass(hass) @@ -620,27 +510,16 @@ async def test_options_flow_zigbee_to_thread_zha_configured( assert result["reason"] == "zha_still_using_stick" -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) async def test_options_flow_thread_to_zigbee_otbr_configured( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test the options flow migration failure, OTBR still using the stick.""" config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", + domain=TEST_DOMAIN, data={ "firmware": "spinel", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, + "device": TEST_DEVICE, + "hardware": TEST_HARDWARE_NAME, }, version=1, minor_version=2, @@ -658,7 +537,7 @@ async def test_options_flow_thread_to_zigbee_otbr_configured( otbr_addon_info=AddonInfo( available=True, hostname=None, - options={"device": usb_data.device}, + options={"device": TEST_DEVICE}, state=AddonState.RUNNING, update_available=False, version="1.0.0", diff --git a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py index 1df8fa86cf9..5718133cd24 100644 --- a/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py +++ b/tests/components/homeassistant_hardware/test_silabs_multiprotocol_addon.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio import AddonError, AddonInfo, AddonState, HassIO from homeassistant.components.hassio.handler import HassioAPIError diff --git a/tests/components/homeassistant_hardware/test_util.py b/tests/components/homeassistant_hardware/test_util.py new file mode 100644 index 00000000000..4a30a39686f --- /dev/null +++ b/tests/components/homeassistant_hardware/test_util.py @@ -0,0 +1,158 @@ +"""Test hardware utilities.""" + +from unittest.mock import AsyncMock, patch + +from universal_silabs_flasher.const import ApplicationType + +from homeassistant.components.hassio import AddonError, AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.util import ( + FirmwareGuess, + get_zha_device_path, + guess_firmware_type, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +ZHA_CONFIG_ENTRY = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={ + "device": { + "path": "socket://1.2.3.4:5678", + "baudrate": 115200, + "flow_control": None, + }, + "radio_type": "ezsp", + }, + version=4, +) + + +def test_get_zha_device_path() -> None: + """Test extracting the ZHA device path from its config entry.""" + assert ( + get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] + ) + + +def test_get_zha_device_path_ignored_discovery() -> None: + """Test extracting the ZHA device path from an ignored ZHA discovery.""" + 
config_entry = MockConfigEntry( + domain="zha", + unique_id="some_unique_id", + data={}, + version=4, + ) + + assert get_zha_device_path(config_entry) is None + + +async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: + """Test guessing the firmware type.""" + + assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" + ) + + +async def test_guess_firmware_type(hass: HomeAssistant) -> None: + """Test guessing the firmware.""" + path = ZHA_CONFIG_ENTRY.data["device"]["path"] + + ZHA_CONFIG_ENTRY.add_to_hass(hass) + + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=False, firmware_type=ApplicationType.EZSP, source="zha" + ) + + # When ZHA is running, we indicate as such when guessing + ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager = AsyncMock() + mock_multipan_addon_manager = AsyncMock() + + with ( + patch( + "homeassistant.components.homeassistant_hardware.util.is_hassio", + return_value=True, + ), + patch( + "homeassistant.components.homeassistant_hardware.util.get_otbr_addon_manager", + return_value=mock_otbr_addon_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.util.get_multiprotocol_addon_manager", + return_value=mock_multipan_addon_manager, + ), + ): + mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() + mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() + + # Hassio errors are ignored and we still go with ZHA + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.side_effect = None + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": "/some/other/device"}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will prefer ZHA, as it is running (and actually pointing to the device) + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + # We will still prefer ZHA, as it is the one actually running + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.EZSP, source="zha" + ) + + mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + # Finally, ZHA loses out to OTBR + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" + ) + + mock_multipan_addon_manager.async_get_addon_info.side_effect = None + mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": path}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + 
) + + # Which will lose out to multi-PAN + assert (await guess_firmware_type(hass, path)) == FirmwareGuess( + is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" + ) diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index 099582999d5..69b0901aadf 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -1,9 +1,9 @@ """Test fixtures for the Home Assistant SkyConnect integration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(name="mock_usb_serial_by_id", autouse=True) diff --git a/tests/components/homeassistant_sky_connect/test_config_flow.py b/tests/components/homeassistant_sky_connect/test_config_flow.py index a4b7b4fb81d..0d4c517b07f 100644 --- a/tests/components/homeassistant_sky_connect/test_config_flow.py +++ b/tests/components/homeassistant_sky_connect/test_config_flow.py @@ -1,30 +1,20 @@ """Test the Home Assistant SkyConnect config flow.""" -import asyncio -from collections.abc import Awaitable, Callable -import contextlib -from typing import Any -from unittest.mock import AsyncMock, Mock, call, patch +from unittest.mock import Mock, patch import pytest -from universal_silabs_flasher.const import ApplicationType from homeassistant.components import usb from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_ZIGBEE, +) from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( CONF_DISABLE_MULTI_PAN, get_flasher_addon_manager, get_multiprotocol_addon_manager, ) -from homeassistant.components.homeassistant_sky_connect.config_flow import ( - STEP_PICK_FIRMWARE_THREAD, - STEP_PICK_FIRMWARE_ZIGBEE, -) from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.components.homeassistant_sky_connect.util import ( - get_otbr_addon_manager, - get_zigbee_flasher_addon_manager, -) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -49,86 +39,6 @@ USB_DATA_ZBT1 = usb.UsbServiceInfo( ) -def delayed_side_effect() -> Callable[..., Awaitable[None]]: - """Slows down eager tasks by delaying for an event loop tick.""" - - async def side_effect(*args: Any, **kwargs: Any) -> None: - await asyncio.sleep(0) - - return side_effect - - -@contextlib.contextmanager -def mock_addon_info( - hass: HomeAssistant, - *, - is_hassio: bool = True, - app_type: ApplicationType = ApplicationType.EZSP, - otbr_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), - flasher_addon_info: AddonInfo = AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_INSTALLED, - update_available=False, - version=None, - ), -): - """Mock the main addon states for the config flow.""" - mock_flasher_manager = Mock(spec_set=get_zigbee_flasher_addon_manager(hass)) - mock_flasher_manager.addon_name = "Silicon Labs Flasher" - mock_flasher_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - 
mock_flasher_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_flasher_manager.async_get_addon_info.return_value = flasher_addon_info - - mock_otbr_manager = Mock(spec_set=get_otbr_addon_manager(hass)) - mock_otbr_manager.addon_name = "OpenThread Border Router" - mock_otbr_manager.async_install_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_uninstall_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_start_addon_waiting = AsyncMock( - side_effect=delayed_side_effect() - ) - mock_otbr_manager.async_get_addon_info.return_value = otbr_addon_info - - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.get_otbr_addon_manager", - return_value=mock_otbr_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.get_zigbee_flasher_addon_manager", - return_value=mock_flasher_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.is_hassio", - return_value=is_hassio, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.config_flow.probe_silabs_firmware_type", - return_value=app_type, - ), - ): - yield mock_otbr_manager, mock_flasher_manager - - @pytest.mark.parametrize( ("usb_data", "model"), [ @@ -136,7 +46,7 @@ def mock_addon_info( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_config_flow_zigbee( +async def test_config_flow( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: """Test the config flow for SkyConnect.""" @@ -146,453 +56,42 @@ async def test_config_flow_zigbee( assert result["type"] is FlowResultType.MENU assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we are now installing the addon - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await 
hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_skip_step_if_installed( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the config flow for SkyConnect, skip installing the addon if necessary.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - flasher_addon_info=AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we skip installation, instead we directly run it - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert result["description_placeholders"]["firmware_type"] == "spinel" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - # Uninstall the addon - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - # Done - await hass.async_block_till_done(wait_background_tasks=True) - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the config flow for SkyConnect.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - - with mock_addon_info( - hass, - 
app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_otbr_addon" - assert result["description_placeholders"]["firmware_type"] == "ezsp" - assert result["description_placeholders"]["model"] == model - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "spinel", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_thread_addon_already_installed( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the Thread config flow for SkyConnect, addon is already installed.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - otbr_addon_info=AddonInfo( - available=True, - hostname=None, - options={}, - state=AddonState.NOT_RUNNING, - update_available=False, - version=None, - ), - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - 
result = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_config_flow_zigbee_not_hassio( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test when the stick is used with a non-hassio setup.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": "usb"}, data=usb_data - ) - - with mock_addon_info( - hass, - is_hassio=False, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - - config_entry = result["result"] - assert config_entry.data == { - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - } - - # Ensure a ZHA discovery flow has been created - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - zha_flow = flows[0] - assert zha_flow["handler"] == "zha" - assert zha_flow["context"]["source"] == "hardware" - assert zha_flow["step_id"] == "confirm" - - -@pytest.mark.parametrize( - ("usb_data", "model"), - [ - (USB_DATA_SKY, "Home Assistant SkyConnect"), - (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), - ], -) -async def test_options_flow_zigbee_to_thread( - usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant -) -> None: - """Test the options flow for SkyConnect, migrating Zigbee to Thread.""" - config_entry = MockConfigEntry( - domain="homeassistant_sky_connect", - data={ - "firmware": "ezsp", - "device": usb_data.device, - "manufacturer": usb_data.manufacturer, - "pid": usb_data.pid, - "description": usb_data.description, - "product": usb_data.description, - "serial_number": usb_data.serial_number, - "vid": usb_data.vid, - }, - version=1, - minor_version=2, - ) - config_entry.add_to_hass(hass) - - assert await hass.config_entries.async_setup(config_entry.entry_id) - - # First step is confirmation - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - assert result["step_id"] == "pick_firmware" - assert result["description_placeholders"]["firmware_type"] == "ezsp" assert result["description_placeholders"]["model"] == model - with mock_addon_info( - hass, - app_type=ApplicationType.EZSP, - ) as (mock_otbr_manager, mock_flasher_manager): - result = await hass.config_entries.options.async_configure( + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareConfigFlow.async_step_pick_firmware_zigbee", + autospec=True, + 
side_effect=mock_async_step_pick_firmware_zigbee, + ): + result = await hass.config_entries.flow.async_configure( result["flow_id"], - user_input={"next_step_id": STEP_PICK_FIRMWARE_THREAD}, + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_otbr_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - mock_otbr_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={ - "device": "", - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - }, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.2.3", - ) - - # Progress the flow, it is now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_otbr_addon" - assert result["progress_action"] == "start_otbr_addon" - - assert mock_otbr_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 460800, - "flow_control": True, - "autoflash_firmware": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # The addon is now running - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_otbr" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) assert result["type"] is FlowResultType.CREATE_ENTRY - # The firmware type has been updated - assert config_entry.data["firmware"] == "spinel" + config_entry = result["result"] + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } + + # Ensure a ZHA discovery flow has been created + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + zha_flow = flows[0] + assert zha_flow["handler"] == "zha" + assert zha_flow["context"]["source"] == "hardware" + assert zha_flow["step_id"] == "confirm" @pytest.mark.parametrize( @@ -602,10 +101,10 @@ async def test_options_flow_zigbee_to_thread( (USB_DATA_ZBT1, "Home Assistant Connect ZBT-1"), ], ) -async def test_options_flow_thread_to_zigbee( +async def test_options_flow( usb_data: usb.UsbServiceInfo, model: str, hass: HomeAssistant ) -> None: - """Test the options flow for SkyConnect, migrating Thread to Zigbee.""" + """Test the options flow for SkyConnect.""" config_entry = MockConfigEntry( domain="homeassistant_sky_connect", data={ @@ -632,62 +131,32 @@ async def test_options_flow_thread_to_zigbee( assert result["description_placeholders"]["firmware_type"] == "spinel" assert result["description_placeholders"]["model"] == model - with mock_addon_info( - hass, - app_type=ApplicationType.SPINEL, - ) as (mock_otbr_manager, mock_flasher_manager): - # Pick the menu option: we are now installing the addon + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + 
"homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["progress_action"] == "install_addon" - assert result["step_id"] == "install_zigbee_flasher_addon" - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now configuring the addon and running it - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "run_zigbee_flasher_addon" - assert result["progress_action"] == "run_zigbee_flasher_addon" - assert mock_flasher_manager.async_set_addon_options.mock_calls == [ - call( - { - "device": usb_data.device, - "baudrate": 115200, - "bootloader_baudrate": 115200, - "flow_control": True, - } - ) - ] - - await hass.async_block_till_done(wait_background_tasks=True) - - # Progress the flow, we are now uninstalling the addon - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "uninstall_zigbee_flasher_addon" - assert result["progress_action"] == "uninstall_zigbee_flasher_addon" - - await hass.async_block_till_done(wait_background_tasks=True) - - # We are finally done with the addon - assert mock_flasher_manager.async_uninstall_addon_waiting.mock_calls == [call()] - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "confirm_zigbee" - - # We are now done - result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={} - ) assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"] is True - # The firmware type has been updated - assert config_entry.data["firmware"] == "ezsp" + assert config_entry.data == { + "firmware": "ezsp", + "device": usb_data.device, + "manufacturer": usb_data.manufacturer, + "pid": usb_data.pid, + "description": usb_data.description, + "product": usb_data.description, + "serial_number": usb_data.serial_number, + "vid": usb_data.vid, + } @pytest.mark.parametrize( diff --git a/tests/components/homeassistant_sky_connect/test_hardware.py b/tests/components/homeassistant_sky_connect/test_hardware.py index 888ed27a3c0..f39e648b0f2 100644 --- a/tests/components/homeassistant_sky_connect/test_hardware.py +++ b/tests/components/homeassistant_sky_connect/test_hardware.py @@ -1,7 +1,8 @@ """Test the Home Assistant SkyConnect hardware platform.""" from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_init.py b/tests/components/homeassistant_sky_connect/test_init.py index 88b57f2dd64..e1c13771fdc 100644 --- a/tests/components/homeassistant_sky_connect/test_init.py +++ b/tests/components/homeassistant_sky_connect/test_init.py @@ -4,8 +4,8 @@ from unittest.mock import patch 
from universal_silabs_flasher.const import ApplicationType +from homeassistant.components.homeassistant_hardware.util import FirmwareGuess from homeassistant.components.homeassistant_sky_connect.const import DOMAIN -from homeassistant.components.homeassistant_sky_connect.util import FirmwareGuess from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry diff --git a/tests/components/homeassistant_sky_connect/test_util.py b/tests/components/homeassistant_sky_connect/test_util.py index b560acc65b7..1d1d70c1b4c 100644 --- a/tests/components/homeassistant_sky_connect/test_util.py +++ b/tests/components/homeassistant_sky_connect/test_util.py @@ -1,24 +1,14 @@ """Test SkyConnect utilities.""" -from unittest.mock import AsyncMock, patch - -from universal_silabs_flasher.const import ApplicationType - -from homeassistant.components.hassio import AddonError, AddonInfo, AddonState from homeassistant.components.homeassistant_sky_connect.const import ( DOMAIN, HardwareVariant, ) from homeassistant.components.homeassistant_sky_connect.util import ( - FirmwareGuess, get_hardware_variant, get_usb_service_info, - get_zha_device_path, - guess_firmware_type, ) from homeassistant.components.usb import UsbServiceInfo -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -52,20 +42,6 @@ CONNECT_ZBT1_CONFIG_ENTRY = MockConfigEntry( version=2, ) -ZHA_CONFIG_ENTRY = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={ - "device": { - "path": "/dev/serial/by-id/usb-Nabu_Casa_Home_Assistant_Connect_ZBT-1_3c0ed67c628beb11b1cd64a0f320645d-if00-port0", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - }, - version=4, -) - def test_get_usb_service_info() -> None: """Test `get_usb_service_info` conversion.""" @@ -85,131 +61,3 @@ def test_get_hardware_variant() -> None: assert ( get_hardware_variant(CONNECT_ZBT1_CONFIG_ENTRY) == HardwareVariant.CONNECT_ZBT1 ) - - -def test_get_zha_device_path() -> None: - """Test extracting the ZHA device path from its config entry.""" - assert ( - get_zha_device_path(ZHA_CONFIG_ENTRY) == ZHA_CONFIG_ENTRY.data["device"]["path"] - ) - - -def test_get_zha_device_path_ignored_discovery() -> None: - """Test extracting the ZHA device path from an ignored ZHA discovery.""" - config_entry = MockConfigEntry( - domain="zha", - unique_id="some_unique_id", - data={}, - version=4, - ) - - assert get_zha_device_path(config_entry) is None - - -async def test_guess_firmware_type_unknown(hass: HomeAssistant) -> None: - """Test guessing the firmware type.""" - - assert (await guess_firmware_type(hass, "/dev/missing")) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="unknown" - ) - - -async def test_guess_firmware_type(hass: HomeAssistant) -> None: - """Test guessing the firmware.""" - path = ZHA_CONFIG_ENTRY.data["device"]["path"] - - ZHA_CONFIG_ENTRY.add_to_hass(hass) - - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.NOT_LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=False, firmware_type=ApplicationType.EZSP, source="zha" - ) - - # When ZHA is running, we indicate as such when guessing - ZHA_CONFIG_ENTRY.mock_state(hass, ConfigEntryState.LOADED) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager = AsyncMock() - mock_multipan_addon_manager = 
AsyncMock() - - with ( - patch( - "homeassistant.components.homeassistant_sky_connect.util.is_hassio", - return_value=True, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.util.get_otbr_addon_manager", - return_value=mock_otbr_addon_manager, - ), - patch( - "homeassistant.components.homeassistant_sky_connect.util.get_multiprotocol_addon_manager", - return_value=mock_multipan_addon_manager, - ), - ): - mock_otbr_addon_manager.async_get_addon_info.side_effect = AddonError() - mock_multipan_addon_manager.async_get_addon_info.side_effect = AddonError() - - # Hassio errors are ignored and we still go with ZHA - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.side_effect = None - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": "/some/other/device"}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will prefer ZHA, as it is running (and actually pointing to the device) - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.NOT_RUNNING, - update_available=False, - version="1.0.0", - ) - - # We will still prefer ZHA, as it is the one actually running - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.EZSP, source="zha" - ) - - mock_otbr_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Finally, ZHA loses out to OTBR - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.SPINEL, source="otbr" - ) - - mock_multipan_addon_manager.async_get_addon_info.side_effect = None - mock_multipan_addon_manager.async_get_addon_info.return_value = AddonInfo( - available=True, - hostname=None, - options={"device": path}, - state=AddonState.RUNNING, - update_available=False, - version="1.0.0", - ) - - # Which will lose out to multi-PAN - assert (await guess_firmware_type(hass, path)) == FirmwareGuess( - is_running=True, firmware_type=ApplicationType.CPC, source="multiprotocol" - ) diff --git a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 38398eb719f..0077fb27058 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -1,17 +1,17 @@ """Test fixtures for the Home Assistant Yellow integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: """Mock the radio connection and probing of the ZHA config flow.""" - def mock_probe(config: dict[str, Any]) -> None: + def mock_probe(config: dict[str, Any]) -> dict[str, Any]: # The radio probing will return the correct baudrate return {**config, "baudrate": 115200} diff --git a/tests/components/homeassistant_yellow/test_config_flow.py 
b/tests/components/homeassistant_yellow/test_config_flow.py index 4ae04180a64..95d7df89c9d 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -1,9 +1,9 @@ """Test the Home Assistant Yellow config flow.""" +from collections.abc import Generator from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN from homeassistant.components.homeassistant_yellow.const import DOMAIN diff --git a/tests/components/homekit/conftest.py b/tests/components/homekit/conftest.py index 26333b0b807..6bdad5d2b4c 100644 --- a/tests/components/homekit/conftest.py +++ b/tests/components/homekit/conftest.py @@ -4,7 +4,6 @@ from asyncio import AbstractEventLoop from collections.abc import Generator from contextlib import suppress import os -from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -13,13 +12,13 @@ from homeassistant.components.device_tracker.legacy import YAML_DEVICES from homeassistant.components.homekit.accessories import HomeDriver from homeassistant.components.homekit.const import BRIDGE_NAME, EVENT_HOMEKIT_CHANGED from homeassistant.components.homekit.iidmanager import AccessoryIIDStorage -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_capture_events @pytest.fixture -def iid_storage(hass): +def iid_storage(hass: HomeAssistant) -> Generator[AccessoryIIDStorage]: """Mock the iid storage.""" with patch.object(AccessoryIIDStorage, "_async_schedule_save"): yield AccessoryIIDStorage(hass, "") @@ -28,7 +27,7 @@ def iid_storage(hass): @pytest.fixture def run_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init. 
This mock does not mock async_stop, so the driver will not be stopped @@ -57,7 +56,7 @@ def run_driver( @pytest.fixture def hk_driver( hass: HomeAssistant, event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -89,7 +88,7 @@ def mock_hap( event_loop: AbstractEventLoop, iid_storage: AccessoryIIDStorage, mock_zeroconf: MagicMock, -) -> Generator[HomeDriver, Any, None]: +) -> Generator[HomeDriver]: """Return a custom AccessoryDriver instance for HomeKit accessory init.""" with ( patch("pyhap.accessory_driver.AsyncZeroconf"), @@ -122,13 +121,13 @@ def mock_hap( @pytest.fixture -def events(hass): +def events(hass: HomeAssistant) -> list[Event]: """Yield caught homekit_changed events.""" return async_capture_events(hass, EVENT_HOMEKIT_CHANGED) @pytest.fixture -def demo_cleanup(hass): +def demo_cleanup(hass: HomeAssistant) -> Generator[None]: """Clean up device tracker demo file.""" yield with suppress(FileNotFoundError): diff --git a/tests/components/homekit/test_accessories.py b/tests/components/homekit/test_accessories.py index 32cd6622492..c37cac84b8a 100644 --- a/tests/components/homekit/test_accessories.py +++ b/tests/components/homekit/test_accessories.py @@ -47,7 +47,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, __version__ as hass_version, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service @@ -667,7 +667,9 @@ async def test_battery_appears_after_startup( assert acc._char_battery is None -async def test_call_service(hass: HomeAssistant, hk_driver, events) -> None: +async def test_call_service( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test call_service method.""" entity_id = "homekit.accessory" hass.states.async_set(entity_id, None) diff --git a/tests/components/homekit/test_diagnostics.py b/tests/components/homekit/test_diagnostics.py index 728624da0d0..ce3c954c447 100644 --- a/tests/components/homekit/test_diagnostics.py +++ b/tests/components/homekit/test_diagnostics.py @@ -12,7 +12,7 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.const import CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .util import async_init_integration @@ -321,9 +321,7 @@ async def test_config_entry_with_trigger_accessory( hass: HomeAssistant, hass_client: ClientSessionGenerator, hk_driver, - events, demo_cleanup, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test generating diagnostics for a bridge config entry with a trigger accessory.""" diff --git a/tests/components/homekit/test_get_accessories.py b/tests/components/homekit/test_get_accessories.py index 02a39ed9258..c4b1cbe98d8 100644 --- a/tests/components/homekit/test_get_accessories.py +++ b/tests/components/homekit/test_get_accessories.py @@ -335,10 +335,10 @@ def test_type_sensors(type_name, entity_id, state, attrs) -> None: ("SelectSwitch", "select.test", "option1", {}, {}), ("Switch", "switch.test", "on", {}, {}), ("Switch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SWITCH}), - ("Valve", "switch.test", 
"on", {}, {CONF_TYPE: TYPE_FAUCET}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), - ("Valve", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_FAUCET}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_VALVE}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SHOWER}), + ("ValveSwitch", "switch.test", "on", {}, {CONF_TYPE: TYPE_SPRINKLER}), ], ) def test_type_switches(type_name, entity_id, state, attrs, config) -> None: @@ -350,6 +350,21 @@ def test_type_switches(type_name, entity_id, state, attrs, config) -> None: assert mock_type.called +@pytest.mark.parametrize( + ("type_name", "entity_id", "state", "attrs"), + [ + ("Valve", "valve.test", "on", {}), + ], +) +def test_type_valve(type_name, entity_id, state, attrs) -> None: + """Test if valve types are associated correctly.""" + mock_type = Mock() + with patch.dict(TYPES, {type_name: mock_type}): + entity_state = State(entity_id, state, attrs) + get_accessory(None, None, entity_state, 2, {}) + assert mock_type.called + + @pytest.mark.parametrize( ("type_name", "entity_id", "state", "attrs"), [ diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 33bfc6e66d3..93458724c5e 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -14,6 +14,7 @@ import pytest from homeassistant import config as hass_config from homeassistant.components import homekit as homekit_base, zeroconf from homeassistant.components.binary_sensor import BinarySensorDeviceClass +from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit import ( MAX_DEVICES, STATUS_READY, @@ -58,7 +59,8 @@ from homeassistant.const import ( STATE_ON, EntityCategory, ) -from homeassistant.core import HomeAssistant, HomeAssistantError, State +from homeassistant.core import HomeAssistant, State +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import ( device_registry as dr, entity_registry as er, @@ -1841,7 +1843,11 @@ async def test_homekit_uses_system_zeroconf(hass: HomeAssistant, hk_driver) -> N entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] + # New tests should not access runtime data. + # Do not use this pattern for new tests. 
+ entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( + entry.entry_id + ).runtime_data assert entry_data.homekit.driver.advertiser == system_async_zc assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -1938,12 +1944,21 @@ async def test_homekit_ignored_missing_devices( ) +@pytest.mark.parametrize( + ("domain", "device_class"), + [ + ("binary_sensor", BinarySensorDeviceClass.MOTION), + ("event", EventDeviceClass.MOTION), + ], +) @pytest.mark.usefixtures("mock_async_zeroconf") async def test_homekit_finds_linked_motion_sensors( hass: HomeAssistant, hk_driver, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + domain: str, + device_class: EventDeviceClass | BinarySensorDeviceClass, ) -> None: """Test HomeKit start method.""" entry = await async_init_integration(hass) @@ -1963,21 +1978,21 @@ async def test_homekit_finds_linked_motion_sensors( connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, ) - binary_motion_sensor = entity_registry.async_get_or_create( - "binary_sensor", + entry = entity_registry.async_get_or_create( + domain, "camera", "motion_sensor", device_id=device_entry.id, - original_device_class=BinarySensorDeviceClass.MOTION, + original_device_class=device_class, ) camera = entity_registry.async_get_or_create( "camera", "camera", "demo", device_id=device_entry.id ) hass.states.async_set( - binary_motion_sensor.entity_id, + entry.entity_id, STATE_ON, - {ATTR_DEVICE_CLASS: BinarySensorDeviceClass.MOTION}, + {ATTR_DEVICE_CLASS: device_class}, ) hass.states.async_set(camera.entity_id, STATE_ON) @@ -2000,7 +2015,83 @@ async def test_homekit_finds_linked_motion_sensors( "model": "Camera Server", "platform": "test", "sw_version": "0.16.0", - "linked_motion_sensor": "binary_sensor.camera_motion_sensor", + "linked_motion_sensor": entry.entity_id, + }, + ) + + +@pytest.mark.parametrize( + ("domain", "device_class"), + [ + ("binary_sensor", BinarySensorDeviceClass.OCCUPANCY), + ("event", EventDeviceClass.DOORBELL), + ], +) +@pytest.mark.usefixtures("mock_async_zeroconf") +async def test_homekit_finds_linked_doorbell_sensors( + hass: HomeAssistant, + hk_driver, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + domain: str, + device_class: EventDeviceClass | BinarySensorDeviceClass, +) -> None: + """Test homekit can find linked doorbell sensors.""" + entry = await async_init_integration(hass) + + homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) + + homekit.driver = hk_driver + homekit.bridge = HomeBridge(hass, hk_driver, "mock_bridge") + + config_entry = MockConfigEntry(domain="test", data={}) + config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + sw_version="0.16.0", + model="Camera Server", + manufacturer="Ubq", + connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, + ) + + entry = entity_registry.async_get_or_create( + domain, + "camera", + "doorbell_sensor", + device_id=device_entry.id, + original_device_class=device_class, + ) + camera = entity_registry.async_get_or_create( + "camera", "camera", "demo", device_id=device_entry.id + ) + + hass.states.async_set( + entry.entity_id, + STATE_ON, + {ATTR_DEVICE_CLASS: device_class}, + ) + hass.states.async_set(camera.entity_id, STATE_ON) + + with ( + patch.object(homekit.bridge, "add_accessory"), + patch(f"{PATH_HOMEKIT}.async_show_setup_message"), + patch(f"{PATH_HOMEKIT}.get_accessory") as mock_get_acc, + 
patch("pyhap.accessory_driver.AccessoryDriver.async_start"), + ): + await homekit.async_start() + await hass.async_block_till_done() + + mock_get_acc.assert_called_with( + hass, + ANY, + ANY, + ANY, + { + "manufacturer": "Ubq", + "model": "Camera Server", + "platform": "test", + "sw_version": "0.16.0", + "linked_doorbell_sensor": entry.entity_id, }, ) diff --git a/tests/components/homekit/test_type_cameras.py b/tests/components/homekit/test_type_cameras.py index 184ce1b6521..a32656e9f2b 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components import camera, ffmpeg from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.camera.img_util import TurboJPEGSingleton +from homeassistant.components.event import EventDeviceClass from homeassistant.components.homekit.accessories import HomeBridge from homeassistant.components.homekit.const import ( AUDIO_CODEC_COPY, @@ -30,10 +31,17 @@ from homeassistant.components.homekit.const import ( ) from homeassistant.components.homekit.type_cameras import Camera from homeassistant.components.homekit.type_switches import Switch -from homeassistant.const import ATTR_DEVICE_CLASS, STATE_OFF, STATE_ON +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component +from homeassistant.util import dt as dt_util from tests.components.camera.common import mock_turbo_jpeg @@ -130,9 +138,7 @@ def _get_failing_mock_ffmpeg(): return ffmpeg -async def test_camera_stream_source_configured( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_configured(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -252,7 +258,7 @@ async def test_camera_stream_source_configured( async def test_camera_stream_source_configured_with_failing_ffmpeg( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source with ffmpeg failing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -308,9 +314,7 @@ async def test_camera_stream_source_configured_with_failing_ffmpeg( await _async_stop_all_streams(hass, acc) -async def test_camera_stream_source_found( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_found(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream and we get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -396,9 +400,7 @@ async def test_camera_stream_source_found( ) -async def test_camera_stream_source_fails( - hass: HomeAssistant, run_driver, events -) -> None: +async def test_camera_stream_source_fails(hass: HomeAssistant, run_driver) -> None: """Test a camera that can stream and we cannot get the source from the entity.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component( @@ -439,7 +441,7 @@ async def test_camera_stream_source_fails( await _async_stop_all_streams(hass, acc) -async def test_camera_with_no_stream(hass: 
HomeAssistant, run_driver, events) -> None: +async def test_camera_with_no_stream(hass: HomeAssistant, run_driver) -> None: """Test a camera that cannot stream.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) await async_setup_component(hass, camera.DOMAIN, {camera.DOMAIN: {}}) @@ -472,7 +474,7 @@ async def test_camera_with_no_stream(hass: HomeAssistant, run_driver, events) -> async def test_camera_stream_source_configured_and_copy_codec( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -547,7 +549,7 @@ async def test_camera_stream_source_configured_and_copy_codec( async def test_camera_stream_source_configured_and_override_profile_names( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source over overridden profile names.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -623,7 +625,7 @@ async def test_camera_stream_source_configured_and_override_profile_names( async def test_camera_streaming_fails_after_starting_ffmpeg( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera that can stream with a configured source.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -700,7 +702,7 @@ async def test_camera_streaming_fails_after_starting_ffmpeg( async def test_camera_with_linked_motion_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a linked motion sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -793,8 +795,151 @@ async def test_camera_with_linked_motion_sensor( assert char.value is True +async def test_camera_with_linked_motion_event(hass: HomeAssistant, run_driver) -> None: + """Test a camera with a linked motion event entity can update.""" + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + await async_setup_component( + hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} + ) + await hass.async_block_till_done() + motion_entity_id = "event.motion" + + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + entity_id = "camera.demo_camera" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Camera( + hass, + run_driver, + "Camera", + entity_id, + 2, + { + CONF_STREAM_SOURCE: "/dev/null", + CONF_SUPPORT_AUDIO: True, + CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, + CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, + CONF_LINKED_MOTION_SENSOR: motion_entity_id, + }, + ) + bridge = HomeBridge("hass", run_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 17 # Camera + + service = acc.get_service(SERV_MOTION_SENSOR) + assert service + char = service.get_characteristic(CHAR_MOTION_DETECTED) + assert char + + assert char.value is False + broker = MagicMock() + char.broker = broker + + hass.states.async_set( + motion_entity_id, STATE_UNKNOWN, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + assert char.value is False + + char.set_value(True) + fire_time = dt_util.utcnow().isoformat() + 
hass.states.async_set( + motion_entity_id, fire_time, {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION} + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 4 + broker.reset_mock() + assert char.value is False + + hass.states.async_set( + motion_entity_id, + fire_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + force_update=True, + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + motion_entity_id, + fire_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert len(broker.mock_calls) == 0 + broker.reset_mock() + # Ensure we do not throw when the linked + # motion sensor is removed + hass.states.async_remove(motion_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is False + + # Ensure re-adding does not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + # Now go unavailable + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # Going from unavailable to a state should not fire an event + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But another update does + broker.reset_mock() + hass.states.async_set( + motion_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.MOTION, "other": "attr"}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_motion_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a configured linked motion sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -826,7 +971,7 @@ async def test_camera_with_a_missing_linked_motion_sensor( async def test_camera_with_linked_doorbell_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a linked doorbell sensor can update.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) @@ -941,8 +1086,154 @@ async def test_camera_with_linked_doorbell_sensor( assert char2.value is None + +async def test_camera_with_linked_doorbell_event( + hass: HomeAssistant, run_driver +) -> None: + """Test a camera with a linked doorbell event can update.""" + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + await async_setup_component( + hass, camera.DOMAIN, {camera.DOMAIN: {"platform": "demo"}} + ) + await hass.async_block_till_done() + doorbell_entity_id = "event.doorbell" + + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + entity_id = "camera.demo_camera" + + 
hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + acc = Camera( + hass, + run_driver, + "Camera", + entity_id, + 2, + { + CONF_STREAM_SOURCE: "/dev/null", + CONF_SUPPORT_AUDIO: True, + CONF_VIDEO_CODEC: VIDEO_CODEC_H264_OMX, + CONF_AUDIO_CODEC: AUDIO_CODEC_COPY, + CONF_LINKED_DOORBELL_SENSOR: doorbell_entity_id, + }, + ) + bridge = HomeBridge("hass", run_driver, "Test Bridge") + bridge.add_accessory(acc) + + acc.run() + + assert acc.aid == 2 + assert acc.category == 17 # Camera + + service = acc.get_service(SERV_DOORBELL) + assert service + char = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char + + assert char.value is None + + service2 = acc.get_service(SERV_STATELESS_PROGRAMMABLE_SWITCH) + assert service2 + char2 = service.get_characteristic(CHAR_PROGRAMMABLE_SWITCH_EVENT) + assert char2 + broker = MagicMock() + char2.broker = broker + assert char2.value is None + + hass.states.async_set( + doorbell_entity_id, + STATE_UNKNOWN, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + + char.set_value(True) + char2.set_value(True) + broker.reset_mock() + + original_time = dt_util.utcnow().isoformat() + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 2 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + force_update=True, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + hass.states.async_set( + doorbell_entity_id, + original_time, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL, "other": "attr"}, + ) + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + assert len(broker.mock_calls) == 0 + broker.reset_mock() + + # Ensure we do not throw when the linked + # doorbell sensor is removed + hass.states.async_remove(doorbell_entity_id) + await hass.async_block_till_done() + acc.run() + await hass.async_block_till_done() + assert char.value is None + assert char2.value is None + + await hass.async_block_till_done() + hass.states.async_set( + doorbell_entity_id, + STATE_UNAVAILABLE, + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + # Ensure re-adding does not fire an event + assert not broker.mock_calls + broker.reset_mock() + + # going from unavailable to a state should not fire an event + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert not broker.mock_calls + + # But a second update does + hass.states.async_set( + doorbell_entity_id, + dt_util.utcnow().isoformat(), + {ATTR_DEVICE_CLASS: EventDeviceClass.DOORBELL}, + ) + await hass.async_block_till_done() + assert broker.mock_calls + + async def test_camera_with_a_missing_linked_doorbell_sensor( - hass: HomeAssistant, run_driver, events + hass: HomeAssistant, run_driver ) -> None: """Test a camera with a configured linked doorbell sensor that is missing.""" await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) diff --git 
a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index 6efd9118092..b3125c6581c 100644 --- a/tests/components/homekit/test_type_covers.py +++ b/tests/components/homekit/test_type_covers.py @@ -40,13 +40,15 @@ from homeassistant.const import ( STATE_UNAVAILABLE, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_garage_door_open_close(hass: HomeAssistant, hk_driver, events) -> None: +async def test_garage_door_open_close( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.garage_door" @@ -132,9 +134,7 @@ async def test_garage_door_open_close(hass: HomeAssistant, hk_driver, events) -> assert events[-1].data[ATTR_VALUE] is None -async def test_door_instantiate_set_position( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_door_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None: """Test if Door accessory is instantiated correctly and can set position.""" entity_id = "cover.door" @@ -185,7 +185,7 @@ async def test_door_instantiate_set_position( async def test_windowcovering_set_cover_position( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -295,9 +295,7 @@ async def test_windowcovering_set_cover_position( assert events[-1].data[ATTR_VALUE] == 75 -async def test_window_instantiate_set_position( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_window_instantiate_set_position(hass: HomeAssistant, hk_driver) -> None: """Test if Window accessory is instantiated correctly and can set position.""" entity_id = "cover.window" @@ -348,7 +346,7 @@ async def test_window_instantiate_set_position( async def test_windowcovering_cover_set_tilt( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA update slat tilt accordingly.""" entity_id = "cover.window" @@ -418,7 +416,7 @@ async def test_windowcovering_cover_set_tilt( assert events[-1].data[ATTR_VALUE] == 75 -async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver, events) -> None: +async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver) -> None: """Test we lock the window covering closed when its tilt only.""" entity_id = "cover.window" @@ -442,7 +440,7 @@ async def test_windowcovering_tilt_only(hass: HomeAssistant, hk_driver, events) async def test_windowcovering_open_close( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -525,7 +523,7 @@ async def test_windowcovering_open_close( async def test_windowcovering_open_close_stop( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "cover.window" @@ -574,7 +572,7 @@ async def test_windowcovering_open_close_stop( async def test_windowcovering_open_close_with_position_and_stop( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory and HA are 
updated accordingly.""" entity_id = "cover.stop_window" @@ -608,7 +606,7 @@ async def test_windowcovering_open_close_with_position_and_stop( async def test_windowcovering_basic_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -646,7 +644,7 @@ async def test_windowcovering_basic_restore( async def test_windowcovering_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event entity_registry.""" hass.set_state(CoreState.not_running) @@ -684,7 +682,7 @@ async def test_windowcovering_restore( async def test_garage_door_with_linked_obstruction_sensor( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if accessory and HA are updated accordingly with a linked obstruction sensor.""" linked_obstruction_sensor_entity_id = "binary_sensor.obstruction" diff --git a/tests/components/homekit/test_type_fans.py b/tests/components/homekit/test_type_fans.py index d971b8c06d2..1808767c614 100644 --- a/tests/components/homekit/test_type_fans.py +++ b/tests/components/homekit/test_type_fans.py @@ -24,13 +24,13 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_fan_basic(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test fan with char state.""" entity_id = "fan.demo" @@ -108,7 +108,9 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] is None -async def test_fan_direction(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_direction( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with direction.""" entity_id = "fan.demo" @@ -186,7 +188,9 @@ async def test_fan_direction(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == DIRECTION_REVERSE -async def test_fan_oscillate(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_oscillate( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with oscillate.""" entity_id = "fan.demo" @@ -259,7 +263,7 @@ async def test_fan_oscillate(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] is True -async def test_fan_speed(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test fan with speed.""" entity_id = "fan.demo" @@ -361,7 +365,9 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events) -> None: assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id -async def test_fan_set_all_one_shot(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_set_all_one_shot( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with speed.""" entity_id = "fan.demo" @@ -555,7 +561,7 @@ async def test_fan_set_all_one_shot(hass: HomeAssistant, hk_driver, events) -> N async def 
test_fan_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -597,7 +603,7 @@ async def test_fan_restore( async def test_fan_multiple_preset_modes( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test fan with multiple preset modes.""" entity_id = "fan.demo" @@ -678,7 +684,9 @@ async def test_fan_multiple_preset_modes( assert len(events) == 2 -async def test_fan_single_preset_mode(hass: HomeAssistant, hk_driver, events) -> None: +async def test_fan_single_preset_mode( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test fan with a single preset mode.""" entity_id = "fan.demo" diff --git a/tests/components/homekit/test_type_humidifiers.py b/tests/components/homekit/test_type_humidifiers.py index fdd01e05a91..fbb72333c9b 100644 --- a/tests/components/homekit/test_type_humidifiers.py +++ b/tests/components/homekit/test_type_humidifiers.py @@ -42,12 +42,12 @@ from homeassistant.const import ( STATE_ON, STATE_UNAVAILABLE, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_humidifier(hass: HomeAssistant, hk_driver, events) -> None: +async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if humidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -132,7 +132,9 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == "RelativeHumidityHumidifierThreshold to 39.0%" -async def test_dehumidifier(hass: HomeAssistant, hk_driver, events) -> None: +async def test_dehumidifier( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if dehumidifier accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -220,7 +222,9 @@ async def test_dehumidifier(hass: HomeAssistant, hk_driver, events) -> None: ) -async def test_hygrostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_hygrostat_power_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "humidifier.test" @@ -301,7 +305,7 @@ async def test_hygrostat_power_state(hass: HomeAssistant, hk_driver, events) -> async def test_hygrostat_get_humidity_range( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if humidity range is evaluated correctly.""" entity_id = "humidifier.test" @@ -452,7 +456,10 @@ async def test_humidifier_with_a_missing_linked_humidity_sensor( async def test_humidifier_as_dehumidifier( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test an invalid char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" @@ -495,7 +502,10 @@ async def test_humidifier_as_dehumidifier( async def test_dehumidifier_as_humidifier( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test an invalid 
char_target_humidifier_dehumidifier from HomeKit.""" entity_id = "humidifier.test" diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 8d2978fb0bd..02532a91e6d 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -39,7 +39,7 @@ from homeassistant.const import ( STATE_ON, STATE_UNKNOWN, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -53,7 +53,7 @@ async def _wait_for_light_coalesce(hass): await hass.async_block_till_done() -async def test_light_basic(hass: HomeAssistant, hk_driver, events) -> None: +async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test light with char state.""" entity_id = "light.demo" @@ -127,7 +127,7 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events) -> None: [[ColorMode.BRIGHTNESS], [ColorMode.HS], [ColorMode.COLOR_TEMP]], ) async def test_light_brightness( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with brightness.""" entity_id = "light.demo" @@ -274,7 +274,9 @@ async def test_light_brightness( assert acc.char_brightness.value == 1 -async def test_light_color_temperature(hass: HomeAssistant, hk_driver, events) -> None: +async def test_light_color_temperature( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test light with color temperature.""" entity_id = "light.demo" @@ -323,7 +325,7 @@ async def test_light_color_temperature(hass: HomeAssistant, hk_driver, events) - [["color_temp", "hs"], ["color_temp", "rgb"], ["color_temp", "xy"]], ) async def test_light_color_temperature_and_rgb_color( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with color temperature and rgb color not exposing temperature.""" entity_id = "light.demo" @@ -524,7 +526,7 @@ async def test_light_color_temperature_and_rgb_color( "supported_color_modes", [[ColorMode.HS], [ColorMode.RGB], [ColorMode.XY]] ) async def test_light_rgb_color( - hass: HomeAssistant, hk_driver, events, supported_color_modes + hass: HomeAssistant, hk_driver, events: list[Event], supported_color_modes ) -> None: """Test light with rgb_color.""" entity_id = "light.demo" @@ -578,7 +580,7 @@ async def test_light_rgb_color( async def test_light_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -642,7 +644,7 @@ async def test_light_restore( async def test_light_rgb_with_color_temp( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, turn_on_props_with_brightness, @@ -762,7 +764,7 @@ async def test_light_rgb_with_color_temp( async def test_light_rgbwx_with_color_temp_and_brightness( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, turn_on_props_with_brightness, @@ -824,7 +826,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( async def test_light_rgb_or_w_lights( hass: HomeAssistant, hk_driver, - 
events, + events: list[Event], ) -> None: """Test lights with RGB or W lights.""" entity_id = "light.demo" @@ -957,7 +959,7 @@ async def test_light_rgb_or_w_lights( async def test_light_rgb_with_white_switch_to_temp( hass: HomeAssistant, hk_driver, - events, + events: list[Event], supported_color_modes, state_props, ) -> None: @@ -1034,11 +1036,7 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 -async def test_light_rgb_with_hs_color_none( - hass: HomeAssistant, - hk_driver, - events, -) -> None: +async def test_light_rgb_with_hs_color_none(hass: HomeAssistant, hk_driver) -> None: """Test lights hs color set to None.""" entity_id = "light.demo" @@ -1071,7 +1069,7 @@ async def test_light_rgb_with_hs_color_none( async def test_light_rgbww_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events, + events: list[Event], ) -> None: """Test lights with RGBWW convert color temp as expected.""" entity_id = "light.demo" @@ -1192,7 +1190,7 @@ async def test_light_rgbww_with_color_temp_conversion( async def test_light_rgbw_with_color_temp_conversion( hass: HomeAssistant, hk_driver, - events, + events: list[Event], ) -> None: """Test lights with RGBW convert color temp as expected.""" entity_id = "light.demo" @@ -1280,7 +1278,7 @@ async def test_light_rgbw_with_color_temp_conversion( async def test_light_set_brightness_and_color( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" @@ -1365,7 +1363,7 @@ async def test_light_set_brightness_and_color( ) -async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver, events) -> None: +async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver) -> None: """Test mireds are forced to ints.""" entity_id = "light.demo" @@ -1386,7 +1384,7 @@ async def test_light_min_max_mireds(hass: HomeAssistant, hk_driver, events) -> N async def test_light_set_brightness_and_color_temp( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test light with all chars in one go.""" entity_id = "light.demo" diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py index 4d83fe41f48..31f03b1964f 100644 --- a/tests/components/homekit/test_type_locks.py +++ b/tests/components/homekit/test_type_locks.py @@ -18,12 +18,12 @@ from homeassistant.const import ( STATE_UNKNOWN, STATE_UNLOCKED, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_lock_unlock(hass: HomeAssistant, hk_driver, events) -> None: +async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -121,7 +121,9 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events) -> None: @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_code(hass: HomeAssistant, hk_driver, config, events) -> None: +async def test_no_code( + hass: HomeAssistant, hk_driver, config, events: list[Event] +) -> None: """Test accessory if lock doesn't require a code.""" entity_id = "lock.kitchen_door" diff --git a/tests/components/homekit/test_type_media_players.py b/tests/components/homekit/test_type_media_players.py index fb7233e5262..14c21f0a5f5 100644 --- 
a/tests/components/homekit/test_type_media_players.py +++ b/tests/components/homekit/test_type_media_players.py @@ -40,13 +40,15 @@ from homeassistant.const import ( STATE_PLAYING, STATE_STANDBY, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_media_player_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_media_player_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" config = { CONF_FEATURE_LIST: { @@ -177,7 +179,10 @@ async def test_media_player_set_state(hass: HomeAssistant, hk_driver, events) -> async def test_media_player_television( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test if television accessory and HA are updated accordingly.""" entity_id = "media_player.television" @@ -366,7 +371,7 @@ async def test_media_player_television( async def test_media_player_television_basic( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver, caplog: pytest.LogCaptureFixture ) -> None: """Test if basic television accessory and HA are updated accordingly.""" entity_id = "media_player.television" @@ -409,7 +414,7 @@ async def test_media_player_television_basic( async def test_media_player_television_supports_source_select_no_sources( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if basic tv that supports source select but is missing a source list.""" entity_id = "media_player.television" @@ -429,7 +434,7 @@ async def test_media_player_television_supports_source_select_no_sources( async def test_tv_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -482,7 +487,7 @@ async def test_tv_restore( async def test_media_player_television_max_sources( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if television accessory that reaches the maximum number of sources.""" entity_id = "media_player.television" @@ -541,7 +546,7 @@ async def test_media_player_television_max_sources( async def test_media_player_television_duplicate_sources( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver ) -> None: """Test if television accessory with duplicate sources.""" entity_id = "media_player.television" @@ -586,7 +591,7 @@ async def test_media_player_television_duplicate_sources( async def test_media_player_television_unsafe_chars( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if television accessory with unsafe characters.""" entity_id = "media_player.television" diff --git a/tests/components/homekit/test_type_remote.py b/tests/components/homekit/test_type_remote.py index bd4ead58a7b..dedf3ae34db 100644 --- a/tests/components/homekit/test_type_remote.py +++ b/tests/components/homekit/test_type_remote.py 
@@ -26,13 +26,13 @@ from homeassistant.const import ( STATE_ON, STATE_STANDBY, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service async def test_activity_remote( - hass: HomeAssistant, hk_driver: HomeDriver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, hk_driver: HomeDriver, events: list[Event] ) -> None: """Test if remote accessory and HA are updated accordingly.""" entity_id = "remote.harmony" @@ -156,7 +156,10 @@ async def test_activity_remote( async def test_activity_remote_bad_names( - hass: HomeAssistant, hk_driver, events, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hk_driver, + events: list[Event], + caplog: pytest.LogCaptureFixture, ) -> None: """Test if remote accessory with invalid names works as expected.""" entity_id = "remote.harmony" diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py index 18434a345ce..27580949ec2 100644 --- a/tests/components/homekit/test_type_security_systems.py +++ b/tests/components/homekit/test_type_security_systems.py @@ -21,12 +21,14 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, STATE_UNKNOWN, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from tests.common import async_mock_service -async def test_switch_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_switch_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" code = "1234" config = {ATTR_CODE: code} @@ -118,7 +120,9 @@ async def test_switch_set_state(hass: HomeAssistant, hk_driver, events) -> None: @pytest.mark.parametrize("config", [{}, {ATTR_CODE: None}]) -async def test_no_alarm_code(hass: HomeAssistant, hk_driver, config, events) -> None: +async def test_no_alarm_code( + hass: HomeAssistant, hk_driver, config, events: list[Event] +) -> None: """Test accessory if security_system doesn't require an alarm_code.""" entity_id = "alarm_control_panel.test" @@ -139,7 +143,7 @@ async def test_no_alarm_code(hass: HomeAssistant, hk_driver, config, events) -> assert events[-1].data[ATTR_VALUE] is None -async def test_arming(hass: HomeAssistant, hk_driver, events) -> None: +async def test_arming(hass: HomeAssistant, hk_driver) -> None: """Test to make sure arming sets the right state.""" entity_id = "alarm_control_panel.test" @@ -190,7 +194,7 @@ async def test_arming(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_current_state.value == 4 -async def test_supported_states(hass: HomeAssistant, hk_driver, events) -> None: +async def test_supported_states(hass: HomeAssistant, hk_driver) -> None: """Test different supported states.""" code = "1234" config = {ATTR_CODE: code} diff --git a/tests/components/homekit/test_type_sensors.py b/tests/components/homekit/test_type_sensors.py index fc68b7c8ecf..3e8e05fdcfd 100644 --- a/tests/components/homekit/test_type_sensors.py +++ b/tests/components/homekit/test_type_sensors.py @@ -213,6 +213,16 @@ async def test_pm25(hass: HomeAssistant, hk_driver) -> None: assert acc.char_density.value == 0 assert acc.char_quality.value == 0 + hass.states.async_set(entity_id, "8") + await hass.async_block_till_done() + assert acc.char_density.value == 8 + assert acc.char_quality.value == 1 + + hass.states.async_set(entity_id, "12") + await hass.async_block_till_done() + assert acc.char_density.value 
== 12 + assert acc.char_quality.value == 2 + hass.states.async_set(entity_id, "23") await hass.async_block_till_done() assert acc.char_density.value == 23 @@ -601,7 +611,7 @@ async def test_binary_device_classes(hass: HomeAssistant, hk_driver) -> None: async def test_sensor_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) diff --git a/tests/components/homekit/test_type_switches.py b/tests/components/homekit/test_type_switches.py index 27937babc57..9b708f18b8a 100644 --- a/tests/components/homekit/test_type_switches.py +++ b/tests/components/homekit/test_type_switches.py @@ -17,6 +17,7 @@ from homeassistant.components.homekit.type_switches import ( Switch, Vacuum, Valve, + ValveSwitch, ) from homeassistant.components.select import ATTR_OPTIONS from homeassistant.components.vacuum import ( @@ -33,17 +34,23 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, CONF_TYPE, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, SERVICE_SELECT_OPTION, + STATE_CLOSED, STATE_OFF, STATE_ON, + STATE_OPEN, ) -from homeassistant.core import HomeAssistant, split_entity_id +from homeassistant.core import Event, HomeAssistant, split_entity_id import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def test_outlet_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_outlet_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if Outlet accessory and HA are updated accordingly.""" entity_id = "switch.outlet_test" @@ -96,7 +103,7 @@ async def test_outlet_set_state(hass: HomeAssistant, hk_driver, events) -> None: ], ) async def test_switch_set_state( - hass: HomeAssistant, hk_driver, entity_id, attrs, events + hass: HomeAssistant, hk_driver, entity_id, attrs, events: list[Event] ) -> None: """Test if accessory and HA are updated accordingly.""" domain = split_entity_id(entity_id)[0] @@ -140,32 +147,36 @@ async def test_switch_set_state( assert events[-1].data[ATTR_VALUE] is None -async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_valve_switch_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if Valve accessory and HA are updated accordingly.""" entity_id = "switch.valve_test" hass.states.async_set(entity_id, None) await hass.async_block_till_done() - acc = Valve(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 2, {CONF_TYPE: TYPE_FAUCET}) acc.run() await hass.async_block_till_done() assert acc.category == 29 # Faucet assert acc.char_valve_type.value == 3 # Water faucet - acc = Valve(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 3, {CONF_TYPE: TYPE_SHOWER}) acc.run() await hass.async_block_till_done() assert acc.category == 30 # Shower assert acc.char_valve_type.value == 2 # Shower head - acc = Valve(hass, hk_driver, "Valve", entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER}) + acc = ValveSwitch( + hass, hk_driver, "Valve", entity_id, 4, {CONF_TYPE: TYPE_SPRINKLER} + ) acc.run() await hass.async_block_till_done() assert acc.category == 28 # Sprinkler assert acc.char_valve_type.value == 1 # Irrigation - acc = Valve(hass, hk_driver, "Valve", entity_id, 
5, {CONF_TYPE: TYPE_VALVE}) + acc = ValveSwitch(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) acc.run() await hass.async_block_till_done() @@ -187,8 +198,59 @@ async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_in_use.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, "switch", "turn_on") - call_turn_off = async_mock_service(hass, "switch", "turn_off") + call_turn_on = async_mock_service(hass, "switch", SERVICE_TURN_ON) + call_turn_off = async_mock_service(hass, "switch", SERVICE_TURN_OFF) + + acc.char_active.client_update_value(1) + await hass.async_block_till_done() + assert acc.char_in_use.value == 1 + assert call_turn_on + assert call_turn_on[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 1 + assert events[-1].data[ATTR_VALUE] is None + + acc.char_active.client_update_value(0) + await hass.async_block_till_done() + assert acc.char_in_use.value == 0 + assert call_turn_off + assert call_turn_off[0].data[ATTR_ENTITY_ID] == entity_id + assert len(events) == 2 + assert events[-1].data[ATTR_VALUE] is None + + +async def test_valve_set_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: + """Test if Valve accessory and HA are updated accordingly.""" + entity_id = "valve.valve_test" + + hass.states.async_set(entity_id, None) + await hass.async_block_till_done() + + acc = Valve(hass, hk_driver, "Valve", entity_id, 5, {CONF_TYPE: TYPE_VALVE}) + acc.run() + await hass.async_block_till_done() + + assert acc.aid == 5 + assert acc.category == 29 # Faucet + + assert acc.char_active.value == 0 + assert acc.char_in_use.value == 0 + assert acc.char_valve_type.value == 0 # Generic Valve + + hass.states.async_set(entity_id, STATE_OPEN) + await hass.async_block_till_done() + assert acc.char_active.value == 1 + assert acc.char_in_use.value == 1 + + hass.states.async_set(entity_id, STATE_CLOSED) + await hass.async_block_till_done() + assert acc.char_active.value == 0 + assert acc.char_in_use.value == 0 + + # Set from HomeKit + call_turn_on = async_mock_service(hass, "valve", SERVICE_OPEN_VALVE) + call_turn_off = async_mock_service(hass, "valve", SERVICE_CLOSE_VALVE) acc.char_active.client_update_value(1) await hass.async_block_till_done() @@ -208,7 +270,7 @@ async def test_valve_set_state(hass: HomeAssistant, hk_driver, events) -> None: async def test_vacuum_set_state_with_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -277,7 +339,7 @@ async def test_vacuum_set_state_with_returnhome_and_start_support( async def test_vacuum_set_state_without_returnhome_and_start_support( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if Vacuum accessory and HA are updated accordingly.""" entity_id = "vacuum.roomba" @@ -322,7 +384,9 @@ async def test_vacuum_set_state_without_returnhome_and_start_support( assert events[-1].data[ATTR_VALUE] is None -async def test_reset_switch(hass: HomeAssistant, hk_driver, events) -> None: +async def test_reset_switch( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if switch accessory is reset correctly.""" domain = "scene" entity_id = "scene.test" @@ -366,7 +430,9 @@ async def test_reset_switch(hass: HomeAssistant, hk_driver, events) -> None: assert len(events) == 1 -async def test_script_switch(hass: 
HomeAssistant, hk_driver, events) -> None: +async def test_script_switch( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if script switch accessory is reset correctly.""" domain = "script" entity_id = "script.test" @@ -415,7 +481,7 @@ async def test_script_switch(hass: HomeAssistant, hk_driver, events) -> None: ["input_select", "select"], ) async def test_input_select_switch( - hass: HomeAssistant, hk_driver, events, domain + hass: HomeAssistant, hk_driver, events: list[Event], domain ) -> None: """Test if select switch accessory is handled correctly.""" entity_id = f"{domain}.test" @@ -470,7 +536,9 @@ async def test_input_select_switch( "domain", ["button", "input_button"], ) -async def test_button_switch(hass: HomeAssistant, hk_driver, events, domain) -> None: +async def test_button_switch( + hass: HomeAssistant, hk_driver, events: list[Event], domain +) -> None: """Test switch accessory from a (input) button entity.""" entity_id = f"{domain}.test" diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index ca2a02cb440..3a32e94e491 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -74,13 +74,13 @@ from homeassistant.const import ( STATE_UNKNOWN, UnitOfTemperature, ) -from homeassistant.core import CoreState, HomeAssistant +from homeassistant.core import CoreState, Event, HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import async_mock_service -async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -375,7 +375,9 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events) -> None: assert events[-1].data[ATTR_VALUE] == "TargetHeatingCoolingState to 3" -async def test_thermostat_auto(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_auto( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -509,7 +511,7 @@ async def test_thermostat_auto(hass: HomeAssistant, hk_driver, events) -> None: async def test_thermostat_mode_and_temp_change( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test if accessory where the mode and temp change in the same call.""" entity_id = "climate.test" @@ -616,7 +618,9 @@ async def test_thermostat_mode_and_temp_change( ) -async def test_thermostat_humidity(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_humidity( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly with humidity.""" entity_id = "climate.test" base_attrs = {ATTR_SUPPORTED_FEATURES: 4} @@ -680,7 +684,7 @@ async def test_thermostat_humidity(hass: HomeAssistant, hk_driver, events) -> No async def test_thermostat_humidity_with_target_humidity( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if accessory and HA are updated accordingly with humidity without target hudmidity. 
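# Note on the ``events`` fixture referenced throughout the hunks above: the
# annotations narrow it to ``list[Event]`` and it is dropped from tests that
# never inspect it. A minimal sketch of such a fixture, assuming it captures
# EVENT_HOMEKIT_CHANGED with the ``async_capture_events`` test helper -- the
# real fixture lives in the homekit test conftest and may be defined differently:
import pytest

from homeassistant.components.homekit.const import EVENT_HOMEKIT_CHANGED
from homeassistant.core import Event, HomeAssistant

from tests.common import async_capture_events


@pytest.fixture
def events(hass: HomeAssistant) -> list[Event]:
    """Collect homekit_state_change events so tests can assert on ATTR_VALUE."""
    return async_capture_events(hass, EVENT_HOMEKIT_CHANGED)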
@@ -704,7 +708,9 @@ async def test_thermostat_humidity_with_target_humidity( assert acc.char_current_humidity.value == 65 -async def test_thermostat_power_state(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_power_state( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" base_attrs = { @@ -812,7 +818,9 @@ async def test_thermostat_power_state(hass: HomeAssistant, hk_driver, events) -> assert acc.char_target_heat_cool.value == 2 -async def test_thermostat_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: +async def test_thermostat_fahrenheit( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "climate.test" @@ -969,7 +977,7 @@ async def test_thermostat_temperature_step_whole( async def test_thermostat_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1500,7 +1508,7 @@ async def test_thermostat_hvac_modes_without_off( async def test_thermostat_without_target_temp_only_range( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver, events: list[Event] ) -> None: """Test a thermostat that only supports a range.""" entity_id = "climate.test" @@ -1662,7 +1670,9 @@ async def test_thermostat_without_target_temp_only_range( assert events[-1].data[ATTR_VALUE] == "HeatingThresholdTemperature to 27.0°C" -async def test_water_heater(hass: HomeAssistant, hk_driver, events) -> None: +async def test_water_heater( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are updated accordingly.""" entity_id = "water_heater.test" @@ -1736,7 +1746,9 @@ async def test_water_heater(hass: HomeAssistant, hk_driver, events) -> None: assert acc.char_target_heat_cool.value == 1 -async def test_water_heater_fahrenheit(hass: HomeAssistant, hk_driver, events) -> None: +async def test_water_heater_fahrenheit( + hass: HomeAssistant, hk_driver, events: list[Event] +) -> None: """Test if accessory and HA are update accordingly.""" entity_id = "water_heater.test" @@ -1799,7 +1811,7 @@ async def test_water_heater_get_temperature_range( async def test_water_heater_restore( - hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver, events + hass: HomeAssistant, entity_registry: er.EntityRegistry, hk_driver ) -> None: """Test setting up an entity from state in the event registry.""" hass.set_state(CoreState.not_running) @@ -1849,7 +1861,7 @@ async def test_water_heater_restore( async def test_thermostat_with_no_modes_when_we_first_see( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if a thermostat that is not ready when we first see it.""" entity_id = "climate.test" @@ -1903,7 +1915,7 @@ async def test_thermostat_with_no_modes_when_we_first_see( async def test_thermostat_with_no_off_after_recheck( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test if a thermostat that is not ready when we first see it that actually does not have off.""" entity_id = "climate.test" @@ -1956,9 +1968,7 @@ async def test_thermostat_with_no_off_after_recheck( assert mock_reload.called -async def test_thermostat_with_temp_clamps( - hass: HomeAssistant, hk_driver, 
events -) -> None: +async def test_thermostat_with_temp_clamps(hass: HomeAssistant, hk_driver) -> None: """Test that temperatures are clamped to valid values to prevent homekit crash.""" entity_id = "climate.test" base_attrs = { @@ -2013,7 +2023,7 @@ async def test_thermostat_with_temp_clamps( async def test_thermostat_with_fan_modes_with_auto( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes with an auto fan mode.""" entity_id = "climate.test" @@ -2219,7 +2229,7 @@ async def test_thermostat_with_fan_modes_with_auto( async def test_thermostat_with_fan_modes_with_off( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes that can turn off.""" entity_id = "climate.test" @@ -2328,7 +2338,7 @@ async def test_thermostat_with_fan_modes_with_off( async def test_thermostat_with_fan_modes_set_to_none( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes set to None.""" entity_id = "climate.test" @@ -2372,7 +2382,7 @@ async def test_thermostat_with_fan_modes_set_to_none( async def test_thermostat_with_fan_modes_set_to_none_not_supported( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan modes set to None and supported feature missing.""" entity_id = "climate.test" @@ -2415,7 +2425,7 @@ async def test_thermostat_with_fan_modes_set_to_none_not_supported( async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( - hass: HomeAssistant, hk_driver, events + hass: HomeAssistant, hk_driver ) -> None: """Test a thermostate with fan mode and supported feature missing.""" entity_id = "climate.test" @@ -2452,9 +2462,7 @@ async def test_thermostat_with_supported_features_target_temp_but_fan_mode_set( assert not acc.fan_chars -async def test_thermostat_handles_unknown_state( - hass: HomeAssistant, hk_driver, events -) -> None: +async def test_thermostat_handles_unknown_state(hass: HomeAssistant, hk_driver) -> None: """Test a thermostat can handle unknown state.""" entity_id = "climate.test" attrs = { diff --git a/tests/components/homekit/test_type_triggers.py b/tests/components/homekit/test_type_triggers.py index 7471e0bff1c..f7415ef5599 100644 --- a/tests/components/homekit/test_type_triggers.py +++ b/tests/components/homekit/test_type_triggers.py @@ -7,7 +7,7 @@ from homeassistant.components.homekit.const import CHAR_PROGRAMMABLE_SWITCH_EVEN from homeassistant.components.homekit.type_triggers import DeviceTriggerAccessory from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_get_device_automations @@ -16,9 +16,7 @@ from tests.common import MockConfigEntry, async_get_device_automations async def test_programmable_switch_button_fires_on_trigger( hass: HomeAssistant, hk_driver, - events, demo_cleanup, - device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: """Test that DeviceTriggerAccessory fires the programmable switch event on trigger.""" diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 24999242dc1..4939511166f 100644 --- a/tests/components/homekit/test_util.py +++ 
b/tests/components/homekit/test_util.py @@ -230,14 +230,15 @@ def test_temperature_to_states() -> None: def test_density_to_air_quality() -> None: """Test map PM2.5 density to HomeKit AirQuality level.""" assert density_to_air_quality(0) == 1 - assert density_to_air_quality(12) == 1 - assert density_to_air_quality(12.1) == 2 + assert density_to_air_quality(9) == 1 + assert density_to_air_quality(9.1) == 2 + assert density_to_air_quality(12) == 2 assert density_to_air_quality(35.4) == 2 assert density_to_air_quality(35.5) == 3 assert density_to_air_quality(55.4) == 3 assert density_to_air_quality(55.5) == 4 - assert density_to_air_quality(150.4) == 4 - assert density_to_air_quality(150.5) == 5 + assert density_to_air_quality(125.4) == 4 + assert density_to_air_quality(125.5) == 5 assert density_to_air_quality(200) == 5 @@ -256,7 +257,12 @@ async def test_async_show_setup_msg(hass: HomeAssistant, hk_driver) -> None: hass, entry.entry_id, "bridge_name", pincode, "X-HM://0" ) await hass.async_block_till_done() - entry_data: HomeKitEntryData = hass.data[DOMAIN][entry.entry_id] + + # New tests should not access runtime data. + # Do not use this pattern for new tests. + entry_data: HomeKitEntryData = hass.config_entries.async_get_entry( + entry.entry_id + ).runtime_data assert entry_data.pairing_qr_secret assert entry_data.pairing_qr diff --git a/tests/components/homekit_controller/common.py b/tests/components/homekit_controller/common.py index 1360b463e4a..9aba3ef3225 100644 --- a/tests/components/homekit_controller/common.py +++ b/tests/components/homekit_controller/common.py @@ -11,12 +11,7 @@ from unittest import mock from aiohomekit.controller.abstract import AbstractDescription, AbstractPairing from aiohomekit.hkjson import loads as hkloads -from aiohomekit.model import ( - Accessories, - AccessoriesState, - Accessory, - mixin as model_mixin, -) +from aiohomekit.model import Accessories, AccessoriesState, Accessory from aiohomekit.testing import FakeController, FakePairing from homeassistant.components.device_automation import DeviceAutomationType @@ -282,7 +277,7 @@ async def device_config_changed(hass: HomeAssistant, accessories: Accessories): async def setup_test_component( - hass, setup_accessory, capitalize=False, suffix=None, connection=None + hass, aid, setup_accessory, capitalize=False, suffix=None, connection=None ): """Load a fake homekit accessory based on a homekit accessory model. 
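# Note on the test_util.py hunk above: the expected density_to_air_quality
# breakpoints shift to the revised PM2.5 bands (level 1 up to 9.0, 2 up to 35.4,
# 3 up to 55.4, 4 up to 125.4, 5 above that). A minimal mapping consistent with
# those assertions -- a sketch only; the real implementation lives in
# homeassistant.components.homekit.util and may differ in form:
def density_to_air_quality_sketch(density: float) -> int:
    """Map a PM2.5 density to a HomeKit AirQuality level (1 best .. 5 worst)."""
    if density <= 9.0:
        return 1
    if density <= 35.4:
        return 2
    if density <= 55.4:
        return 3
    if density <= 125.4:
        return 4
    return 5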
@@ -291,7 +286,7 @@ async def setup_test_component( If suffix is set, entityId will include the suffix """ accessory = Accessory.create_with_info( - "TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) setup_accessory(accessory) @@ -397,8 +392,3 @@ async def assert_devices_and_entities_created( # Root device must not have a via, otherwise its not the device assert root_device.via_device_id is None - - -def get_next_aid(): - """Get next aid.""" - return model_mixin.id_counter + 1 diff --git a/tests/components/homekit_controller/conftest.py b/tests/components/homekit_controller/conftest.py index 427c5285436..eea3f4b67f2 100644 --- a/tests/components/homekit_controller/conftest.py +++ b/tests/components/homekit_controller/conftest.py @@ -1,5 +1,6 @@ """HomeKit controller session fixtures.""" +from collections.abc import Callable, Generator import datetime from unittest.mock import MagicMock, patch @@ -7,7 +8,6 @@ from aiohomekit.testing import FakeController from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator import homeassistant.util.dt as dt_util @@ -44,3 +44,16 @@ def hk_mock_async_zeroconf(mock_async_zeroconf: MagicMock) -> None: @pytest.fixture(autouse=True) def auto_mock_bluetooth(mock_bluetooth: None) -> None: """Auto mock bluetooth.""" + + +@pytest.fixture +def get_next_aid() -> Generator[Callable[[], int]]: + """Generate a function that returns increasing accessory ids.""" + id_counter = 0 + + def _get_id(): + nonlocal id_counter + id_counter += 1 + return id_counter + + return _get_id diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 394a442787d..2e96295a0ab 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -24,6 +24,7 @@ ]), 'manufacturer': 'Sleekpoint Innovations', 'model': 'AP2', + 'model_id': None, 'name': 'Airversa AP2 1808', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -102,7 +103,7 @@ 'original_name': 'Airversa AP2 1808 AirPurifier', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_32832', 'unit_of_measurement': None, @@ -114,7 +115,7 @@ 'percentage_step': 20.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.airversa_ap2_1808_airpurifier', 'state': 'off', @@ -621,6 +622,7 @@ ]), 'manufacturer': 'Anker', 'model': 'T8010', + 'model_id': None, 'name': 'eufy HomeBase2-0AAA', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -695,6 +697,7 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-0000', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -937,6 +940,7 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1179,6 +1183,7 @@ ]), 'manufacturer': 'Anker', 'model': 'T8113', + 'model_id': None, 'name': 'eufyCam2-000A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1425,6 +1430,7 @@ ]), 'manufacturer': 'Aqara', 'model': 'HE1-G01', + 'model_id': None, 'name': 'Aqara-Hub-E1-00A0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1632,6 +1638,7 @@ ]), 'manufacturer': 
'Aqara', 'model': 'AS006', + 'model_id': None, 'name': 'Contact Sensor', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -1797,6 +1804,7 @@ ]), 'manufacturer': 'Aqara', 'model': 'ZHWA11LM', + 'model_id': None, 'name': 'Aqara Hub-1563', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2073,6 +2081,7 @@ ]), 'manufacturer': 'Aqara', 'model': 'AR004', + 'model_id': None, 'name': 'Programmable Switch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2197,6 +2206,7 @@ ]), 'manufacturer': 'Netgear, Inc', 'model': 'ABC1000', + 'model_id': None, 'name': 'ArloBabyA0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -2682,6 +2692,7 @@ ]), 'manufacturer': 'ConnectSense', 'model': 'CS-IWO', + 'model_id': None, 'name': 'InWall Outlet-0394DE', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3112,6 +3123,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Basement', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3272,6 +3284,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3727,6 +3740,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -3887,6 +3901,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Porch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4051,6 +4066,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4510,6 +4526,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Basement', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4625,6 +4642,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee3', + 'model_id': None, 'name': 'HomeW', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -4907,6 +4925,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5067,6 +5086,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'REMOTE SENSOR', + 'model_id': None, 'name': 'Porch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5231,6 +5251,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ECB501', + 'model_id': None, 'name': 'My ecobee', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5699,6 +5720,7 @@ ]), 'manufacturer': 'ecobee Inc.', 'model': 'ecobee Switch+', + 'model_id': None, 'name': 'Master Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -5989,6 +6011,7 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Degree 00AAA0000', + 'model_id': None, 'name': 'Eve Degree AA11', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6346,6 +6369,7 @@ ]), 'manufacturer': 'Elgato', 'model': 'Eve Energy 20EAO8601', + 'model_id': None, 'name': 'Eve Energy 50FF', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6685,6 +6709,7 @@ ]), 'manufacturer': 'José A. 
Jiménez Campos', 'model': 'RavenSystem HAA', + 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -6844,7 +6869,7 @@ 'original_name': 'HAA-C718B3', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -6856,7 +6881,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.haa_c718b3', 'state': 'on', @@ -6891,6 +6916,7 @@ ]), 'manufacturer': 'José A. Jiménez Campos', 'model': 'RavenSystem HAA', + 'model_id': None, 'name': 'HAA-C718B3', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7327,6 +7353,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7489,6 +7516,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7563,6 +7591,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7729,6 +7758,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Ceiling Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7807,7 +7837,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -7819,7 +7849,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -7850,6 +7880,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -7924,6 +7955,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8002,7 +8034,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -8015,7 +8047,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -8050,6 +8082,7 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', + 'model_id': None, 'name': '89 Living Room', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8190,7 +8223,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -8203,7 +8236,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -8373,6 +8406,7 @@ ]), 'manufacturer': 'Home 
Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8451,6 +8485,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8525,6 +8560,7 @@ ]), 'manufacturer': 'FirstAlert', 'model': '1039102', + 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8699,6 +8735,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Family Room North', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8861,6 +8898,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -8935,6 +8973,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Kitchen Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9101,6 +9140,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Ceiling Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9179,7 +9219,7 @@ 'original_name': 'Ceiling Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_766313939_8', 'unit_of_measurement': None, @@ -9191,7 +9231,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.ceiling_fan', 'state': 'off', @@ -9222,6 +9262,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9296,6 +9337,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9374,7 +9416,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9388,7 +9430,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9423,6 +9465,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'Home Assistant Bridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9497,6 +9540,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Fan', + 'model_id': None, 'name': 'Living Room Fan', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -9575,7 +9619,7 @@ 'original_name': 'Living Room Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1256851357_8', 'unit_of_measurement': None, @@ -9589,7 +9633,7 @@ 'percentage_step': 1.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.living_room_fan', 'state': 'off', @@ -9624,6 +9668,7 @@ ]), 'manufacturer': 'Lookin', 'model': 'Climate Control', + 'model_id': None, 'name': '89 Living Room', 'name_by_user': None, 'primary_config_entry': 
'TestData', @@ -9773,7 +9818,7 @@ 'original_name': '89 Living Room', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1233851541_175', 'unit_of_measurement': None, @@ -9786,7 +9831,7 @@ 'percentage_step': 33.333333333333336, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.89_living_room', 'state': 'on', @@ -9956,6 +10001,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10034,6 +10080,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10108,6 +10155,7 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', + 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10289,6 +10337,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10363,6 +10412,7 @@ ]), 'manufacturer': 'switchbot', 'model': 'WoHumi', + 'model_id': None, 'name': 'Humidifier 182A', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10544,6 +10594,7 @@ ]), 'manufacturer': 'Home Assistant', 'model': 'Bridge', + 'model_id': None, 'name': 'HASS Bridge S6', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10618,6 +10669,7 @@ ]), 'manufacturer': 'FirstAlert', 'model': '1039102', + 'model_id': None, 'name': 'Laundry Smoke ED78', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -10807,6 +10859,7 @@ ]), 'manufacturer': 'Garzola Marco', 'model': 'Daikin-fwec3a-esp32-homekit-bridge', + 'model_id': None, 'name': 'Air Conditioner', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11006,6 +11059,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11143,6 +11197,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11280,6 +11335,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11417,6 +11473,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW012', + 'model_id': None, 'name': 'Hue ambiance candle', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11554,6 +11611,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW013', + 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11701,6 +11759,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LTW013', + 'model_id': None, 'name': 'Hue ambiance spot', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -11848,6 +11907,7 @@ ]), 'manufacturer': 'Philips', 'model': 'RWL021', + 'model_id': None, 'name': 'Hue dimmer switch', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12164,6 +12224,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12288,6 +12349,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 
'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12412,6 +12474,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12536,6 +12599,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12660,6 +12724,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12784,6 +12849,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -12908,6 +12974,7 @@ ]), 'manufacturer': 'Philips', 'model': 'LWB010', + 'model_id': None, 'name': 'Hue white lamp', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13032,6 +13099,7 @@ ]), 'manufacturer': 'Philips Lighting', 'model': 'BSB002', + 'model_id': None, 'name': 'Philips hue - 482544', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13110,6 +13178,7 @@ ]), 'manufacturer': 'Koogeek', 'model': 'LS1', + 'model_id': None, 'name': 'Koogeek-LS1-20833F', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13253,6 +13322,7 @@ ]), 'manufacturer': 'Koogeek', 'model': 'P1EU', + 'model_id': None, 'name': 'Koogeek-P1-A00AA0', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13417,6 +13487,7 @@ ]), 'manufacturer': 'Koogeek', 'model': 'KH02CN', + 'model_id': None, 'name': 'Koogeek-SW2-187A91', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13620,6 +13691,7 @@ ]), 'manufacturer': 'Lennox', 'model': 'E30 2B', + 'model_id': None, 'name': 'Lennox', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -13901,6 +13973,7 @@ ]), 'manufacturer': 'LG Electronics', 'model': 'OLED55B9PUA', + 'model_id': None, 'name': 'LG webOS TV AF80', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14081,6 +14154,7 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'PD-FSQN-XX', + 'model_id': None, 'name': 'Caséta® Wireless Fan Speed Control', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14159,7 +14233,7 @@ 'original_name': 'Caséta® Wireless Fan Speed Control', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_21474836482_2', 'unit_of_measurement': None, @@ -14171,7 +14245,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.caseta_r_wireless_fan_speed_control', 'state': 'off', @@ -14202,6 +14276,7 @@ ]), 'manufacturer': 'Lutron Electronics Co., Inc', 'model': 'L-BDG2-WH', + 'model_id': None, 'name': 'Smart Bridge 2', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14280,6 +14355,7 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS425F', + 'model_id': None, 'name': 'MSS425F-15cc', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14558,6 +14634,7 @@ ]), 'manufacturer': 'Meross', 'model': 'MSS565', + 'model_id': None, 'name': 'MSS565-28da', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -14686,6 +14763,7 @@ ]), 'manufacturer': 'Empowered Homes Inc.', 'model': 'v1', + 'model_id': None, 'name': 'Mysa-85dda9', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15015,6 +15093,7 
@@ ]), 'manufacturer': 'Nanoleaf', 'model': 'NL55', + 'model_id': None, 'name': 'Nanoleaf Strip 3B32', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15286,6 +15365,7 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Netatmo Doorbell', + 'model_id': None, 'name': 'Netatmo-Doorbell-g738658', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15579,6 +15659,7 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Smart CO Alarm', + 'model_id': None, 'name': 'Smart CO Alarm', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -15739,6 +15820,7 @@ ]), 'manufacturer': 'Netatmo', 'model': 'Healthy Home Coach', + 'model_id': None, 'name': 'Healthy Home Coach', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16041,6 +16123,7 @@ ]), 'manufacturer': 'Green Electronics LLC', 'model': 'SPK5 Pro', + 'model_id': None, 'name': 'RainMachine-00ce4a', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16463,6 +16546,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'Master Bath South', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16625,6 +16709,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', + 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16699,6 +16784,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'RYSE SmartShade', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -16865,6 +16951,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'BR Left', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17027,6 +17114,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'LR Left', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17189,6 +17277,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'LR Right', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17351,6 +17440,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE SmartBridge', + 'model_id': None, 'name': 'RYSE SmartBridge', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17425,6 +17515,7 @@ ]), 'manufacturer': 'RYSE Inc.', 'model': 'RYSE Shade', + 'model_id': None, 'name': 'RZSS', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17591,6 +17682,7 @@ ]), 'manufacturer': 'Schlage ', 'model': 'BE479CAM619', + 'model_id': None, 'name': 'SENSE ', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17710,6 +17802,7 @@ ]), 'manufacturer': 'Hunter Fan', 'model': 'SIMPLEconnect', + 'model_id': None, 'name': 'SIMPLEconnect Fan-06F674', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17788,7 +17881,7 @@ 'original_name': 'SIMPLEconnect Fan-06F674 Hunter Fan', 'platform': 'homekit_controller', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '00:00:00:00:00:00_1_8', 'unit_of_measurement': None, @@ -17801,7 +17894,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'entity_id': 'fan.simpleconnect_fan_06f674_hunter_fan', 'state': 'off', @@ -17886,6 +17979,7 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Gateway', + 'model_id': None, 'name': 'VELUX Gateway', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -17960,6 +18054,7 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Sensor', + 'model_id': 
None, 'name': 'VELUX Sensor', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18169,6 +18264,7 @@ ]), 'manufacturer': 'VELUX', 'model': 'VELUX Window', + 'model_id': None, 'name': 'VELUX Window', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18290,6 +18386,7 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'Flowerbud', + 'model_id': None, 'name': 'VOCOlinc-Flowerbud-0d324b', 'name_by_user': None, 'primary_config_entry': 'TestData', @@ -18595,6 +18692,7 @@ ]), 'manufacturer': 'VOCOlinc', 'model': 'VP3', + 'model_id': None, 'name': 'VOCOlinc-VP3-123456', 'name_by_user': None, 'primary_config_entry': 'TestData', diff --git a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py index aea53e74d46..d6dc0f70015 100644 --- a/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py +++ b/tests/components/homekit_controller/specific_devices/test_fan_that_changes_features.py @@ -29,14 +29,22 @@ async def test_fan_add_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + is FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -50,9 +58,16 @@ async def test_fan_add_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) async def test_fan_remove_feature_at_runtime( @@ -75,13 +90,20 @@ async def test_fan_remove_feature_at_runtime( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to add oscillation accessories = await setup_accessories_from_file( @@ -92,10 +114,18 @@ async def test_fan_remove_feature_at_runtime( fan_state = hass.states.get("fan.living_room_fan") assert ( fan_state.attributes[ATTR_SUPPORTED_FEATURES] - is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION + is FanEntityFeature.SET_SPEED + | FanEntityFeature.DIRECTION + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan_state = hass.states.get("fan.ceiling_fan") - assert 
fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) async def test_bridge_with_two_fans_one_removed( @@ -119,13 +149,20 @@ async def test_bridge_with_two_fans_one_removed( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) fan = entity_registry.async_get("fan.ceiling_fan") assert fan.unique_id == "00:00:00:00:00:00_766313939_8" fan_state = hass.states.get("fan.ceiling_fan") - assert fan_state.attributes[ATTR_SUPPORTED_FEATURES] is FanEntityFeature.SET_SPEED + assert ( + fan_state.attributes[ATTR_SUPPORTED_FEATURES] + is FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Now change the config to remove one of the fans accessories = await setup_accessories_from_file( @@ -141,6 +178,8 @@ async def test_bridge_with_two_fans_one_removed( is FanEntityFeature.SET_SPEED | FanEntityFeature.DIRECTION | FanEntityFeature.OSCILLATE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) # The second fan should have been removed assert not hass.states.get("fan.ceiling_fan") diff --git a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py index 9c6e5a6687a..a16cd052c87 100644 --- a/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py +++ b/tests/components/homekit_controller/specific_devices/test_koogeek_ls1.py @@ -5,7 +5,7 @@ from unittest import mock from aiohomekit.exceptions import AccessoryDisconnectedError, EncryptionError from aiohomekit.model import CharacteristicsTypes, ServicesTypes -from aiohomekit.testing import FakePairing +from aiohomekit.testing import FakeController, FakePairing import pytest from homeassistant.components.homekit_controller.connection import ( @@ -48,7 +48,14 @@ async def test_recover_from_failure(hass: HomeAssistant, failure_cls) -> None: # Test that entity remains in the same state if there is a network error next_update = dt_util.utcnow() + timedelta(seconds=60) - with mock.patch.object(FakePairing, "get_characteristics") as get_char: + with ( + mock.patch.object(FakePairing, "get_characteristics") as get_char, + mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ), + ): get_char.side_effect = failure_cls("Disconnected") # Test that a poll triggers unavailable diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index a8852aac4f7..d08478641b3 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -1,12 +1,14 @@ """Basic checks for HomeKitalarm_control_panel.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_security_system_service(accessory): @@ -27,9 +29,13 @@ def create_security_system_service(accessory): targ_state.value = 50 -async def test_switch_change_alarm_state(hass: HomeAssistant) -> None: +async def 
test_switch_change_alarm_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component(hass, create_security_system_service) + helper = await setup_test_component( + hass, get_next_aid(), create_security_system_service + ) await hass.services.async_call( "alarm_control_panel", @@ -84,9 +90,13 @@ async def test_switch_change_alarm_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_alarm_state(hass: HomeAssistant) -> None: +async def test_switch_read_alarm_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component(hass, create_security_system_service) + helper = await setup_test_component( + hass, get_next_aid(), create_security_system_service + ) await helper.async_update( ServicesTypes.SECURITY_SYSTEM, @@ -126,7 +136,9 @@ async def test_switch_read_alarm_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a alarm_control_panel unique id.""" aid = get_next_aid() @@ -135,7 +147,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_security_system_service) + await setup_test_component(hass, aid, create_security_system_service) assert ( entity_registry.async_get(alarm_control_panel_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_binary_sensor.py b/tests/components/homekit_controller/test_binary_sensor.py index 3d4486bb38d..63b35fbe1b8 100644 --- a/tests/components/homekit_controller/test_binary_sensor.py +++ b/tests/components/homekit_controller/test_binary_sensor.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_motion_sensor_service(accessory): @@ -18,9 +20,13 @@ def create_motion_sensor_service(accessory): cur_state.value = 0 -async def test_motion_sensor_read_state(hass: HomeAssistant) -> None: +async def test_motion_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit motion sensor accessory.""" - helper = await setup_test_component(hass, create_motion_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_motion_sensor_service + ) await helper.async_update( ServicesTypes.MOTION_SENSOR, {CharacteristicsTypes.MOTION_DETECTED: False} @@ -45,9 +51,13 @@ def create_contact_sensor_service(accessory): cur_state.value = 0 -async def test_contact_sensor_read_state(hass: HomeAssistant) -> None: +async def test_contact_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component(hass, 
create_contact_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_contact_sensor_service + ) await helper.async_update( ServicesTypes.CONTACT_SENSOR, {CharacteristicsTypes.CONTACT_STATE: 0} @@ -72,9 +82,13 @@ def create_smoke_sensor_service(accessory): cur_state.value = 0 -async def test_smoke_sensor_read_state(hass: HomeAssistant) -> None: +async def test_smoke_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component(hass, create_smoke_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_smoke_sensor_service + ) await helper.async_update( ServicesTypes.SMOKE_SENSOR, {CharacteristicsTypes.SMOKE_DETECTED: 0} @@ -99,9 +113,13 @@ def create_carbon_monoxide_sensor_service(accessory): cur_state.value = 0 -async def test_carbon_monoxide_sensor_read_state(hass: HomeAssistant) -> None: +async def test_carbon_monoxide_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit contact accessory.""" - helper = await setup_test_component(hass, create_carbon_monoxide_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_carbon_monoxide_sensor_service + ) await helper.async_update( ServicesTypes.CARBON_MONOXIDE_SENSOR, @@ -128,9 +146,13 @@ def create_occupancy_sensor_service(accessory): cur_state.value = 0 -async def test_occupancy_sensor_read_state(hass: HomeAssistant) -> None: +async def test_occupancy_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit occupancy sensor accessory.""" - helper = await setup_test_component(hass, create_occupancy_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_occupancy_sensor_service + ) await helper.async_update( ServicesTypes.OCCUPANCY_SENSOR, {CharacteristicsTypes.OCCUPANCY_DETECTED: False} @@ -155,9 +177,13 @@ def create_leak_sensor_service(accessory): cur_state.value = 0 -async def test_leak_sensor_read_state(hass: HomeAssistant) -> None: +async def test_leak_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit leak sensor accessory.""" - helper = await setup_test_component(hass, create_leak_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_leak_sensor_service + ) await helper.async_update( ServicesTypes.LEAK_SENSOR, {CharacteristicsTypes.LEAK_DETECTED: 0} @@ -175,7 +201,9 @@ async def test_leak_sensor_read_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a binary_sensor unique id.""" aid = get_next_aid() @@ -184,7 +212,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_leak_sensor_service) + await setup_test_component(hass, aid, create_leak_sensor_service) assert ( entity_registry.async_get(binary_sensor_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_button.py b/tests/components/homekit_controller/test_button.py index 9f935569333..058194a7ebd 100644 --- 
a/tests/components/homekit_controller/test_button.py +++ b/tests/components/homekit_controller/test_button.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit button.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, get_next_aid, setup_test_component +from .common import Helper, setup_test_component def create_switch_with_setup_button(accessory): @@ -39,9 +41,13 @@ def create_switch_with_ecobee_clear_hold_button(accessory): return service -async def test_press_button(hass: HomeAssistant) -> None: +async def test_press_button( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a button characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_setup_button) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_setup_button + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. button = Helper( @@ -66,10 +72,12 @@ async def test_press_button(hass: HomeAssistant) -> None: ) -async def test_ecobee_clear_hold_press_button(hass: HomeAssistant) -> None: +async def test_ecobee_clear_hold_press_button( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test ecobee clear hold button characteristic is correctly handled.""" helper = await setup_test_component( - hass, create_switch_with_ecobee_clear_hold_button + hass, get_next_aid(), create_switch_with_ecobee_clear_hold_button ) # Helper will be for the primary entity, which is the outlet. Make a helper for the button. 
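The change repeated across these hunks is that the tests stop importing get_next_aid from .common and instead request it as a pytest fixture typed Callable[[], int], passing the resulting aid explicitly to setup_test_component. A minimal sketch of what such a fixture could look like follows; the module location, the counter start, and the use of itertools.count are illustrative assumptions, not the integration's actual conftest.

# Sketch only (assumed conftest.py helper, not the real implementation):
# a fixture handing out sequential accessory ids so each test sets up
# its accessory under a unique aid.
from collections.abc import Callable
import itertools

import pytest


@pytest.fixture
def get_next_aid() -> Callable[[], int]:
    """Return a callable that yields monotonically increasing accessory ids."""
    counter = itertools.count(start=2)  # starting value is arbitrary for this sketch
    return lambda: next(counter)

With the aid threaded through as an argument, each test controls which accessory id its fixture accessory gets, which is what lets the unique-id migration tests below seed a registry entry for a known aid before setup runs.
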
@@ -96,7 +104,9 @@ async def test_ecobee_clear_hold_press_button(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a button unique id.""" aid = get_next_aid() @@ -105,7 +115,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:1-cid:2", ) - await setup_test_component(hass, create_switch_with_ecobee_clear_hold_button) + await setup_test_component(hass, aid, create_switch_with_ecobee_clear_hold_button) assert ( entity_registry.async_get(button_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_1_2" diff --git a/tests/components/homekit_controller/test_camera.py b/tests/components/homekit_controller/test_camera.py index de64ee95d74..6e20c1feb3c 100644 --- a/tests/components/homekit_controller/test_camera.py +++ b/tests/components/homekit_controller/test_camera.py @@ -1,6 +1,7 @@ """Basic checks for HomeKit cameras.""" import base64 +from collections.abc import Callable from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FAKE_CAMERA_IMAGE @@ -9,7 +10,7 @@ from homeassistant.components import camera from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_camera(accessory): @@ -18,7 +19,9 @@ def create_camera(accessory): async def test_migrate_unique_ids( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test migrating entity unique ids.""" aid = get_next_aid() @@ -27,23 +30,23 @@ async def test_migrate_unique_ids( "homekit_controller", f"homekit-0001-aid:{aid}", ) - await setup_test_component(hass, create_camera) + await setup_test_component(hass, aid, create_camera) assert ( entity_registry.async_get(camera.entity_id).unique_id == f"00:00:00:00:00:00_{aid}" ) -async def test_read_state(hass: HomeAssistant) -> None: +async def test_read_state(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test reading the state of a HomeKit camera.""" - helper = await setup_test_component(hass, create_camera) + helper = await setup_test_component(hass, get_next_aid(), create_camera) state = await helper.poll_and_get_state() assert state.state == "idle" -async def test_get_image(hass: HomeAssistant) -> None: +async def test_get_image(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test getting a JPEG from a camera.""" - helper = await setup_test_component(hass, create_camera) + helper = await setup_test_component(hass, get_next_aid(), create_camera) image = await camera.async_get_image(hass, helper.entity_id) assert image.content == base64.b64decode(FAKE_CAMERA_IMAGE) diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 5470c669700..183e020eb25 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitclimate.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, @@ -21,7 +23,7 @@ from homeassistant.components.climate import ( from homeassistant.core import HomeAssistant from 
homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component # Test thermostat devices @@ -73,9 +75,13 @@ def create_thermostat_service_min_max(accessory): char.maxValue = 1 -async def test_climate_respect_supported_op_modes_1(hass: HomeAssistant) -> None: +async def test_climate_respect_supported_op_modes_1( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component(hass, create_thermostat_service_min_max) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_service_min_max + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat"] @@ -88,16 +94,22 @@ def create_thermostat_service_valid_vals(accessory): char.valid_values = [0, 1, 2] -async def test_climate_respect_supported_op_modes_2(hass: HomeAssistant) -> None: +async def test_climate_respect_supported_op_modes_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component(hass, create_thermostat_service_valid_vals) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_service_valid_vals + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["off", "heat", "cool"] -async def test_climate_change_thermostat_state(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -178,9 +190,11 @@ async def test_climate_change_thermostat_state(hass: HomeAssistant) -> None: ) -async def test_climate_check_min_max_values_per_mode(hass: HomeAssistant) -> None: +async def test_climate_check_min_max_values_per_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we we get the appropriate min/max values for each mode.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -213,9 +227,11 @@ async def test_climate_check_min_max_values_per_mode(hass: HomeAssistant) -> Non assert climate_state.attributes["max_temp"] == 40 -async def test_climate_change_thermostat_temperature(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_temperature( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -244,9 +260,11 @@ async def test_climate_change_thermostat_temperature(hass: HomeAssistant) -> Non ) -async def test_climate_change_thermostat_temperature_range(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_temperature_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set separate heat and cool setpoints in heat_cool mode.""" - helper = await 
setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -278,10 +296,10 @@ async def test_climate_change_thermostat_temperature_range(hass: HomeAssistant) async def test_climate_change_thermostat_temperature_range_iphone( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we can set all three set points at once (iPhone heat_cool mode support).""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -313,10 +331,10 @@ async def test_climate_change_thermostat_temperature_range_iphone( async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we cannot set range values when not in heat_cool mode.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await hass.services.async_call( DOMAIN, @@ -372,10 +390,12 @@ def create_thermostat_single_set_point_auto(accessory): async def test_climate_check_min_max_values_per_mode_sspa_device( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test appropriate min/max values for each mode on sspa devices.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -408,9 +428,13 @@ async def test_climate_check_min_max_values_per_mode_sspa_device( assert climate_state.attributes["max_temp"] == 35 -async def test_climate_set_thermostat_temp_on_sspa_device(hass: HomeAssistant) -> None: +async def test_climate_set_thermostat_temp_on_sspa_device( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test setting temperature in different modes on device with single set point in auto.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -462,9 +486,13 @@ async def test_climate_set_thermostat_temp_on_sspa_device(hass: HomeAssistant) - ) -async def test_climate_set_mode_via_temp(hass: HomeAssistant) -> None: +async def test_climate_set_mode_via_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test setting temperature and mode at same tims.""" - helper = await setup_test_component(hass, create_thermostat_single_set_point_auto) + helper = await setup_test_component( + hass, get_next_aid(), create_thermostat_single_set_point_auto + ) await hass.services.async_call( DOMAIN, @@ -503,9 +531,11 @@ async def test_climate_set_mode_via_temp(hass: HomeAssistant) -> None: ) -async def test_climate_change_thermostat_humidity(hass: HomeAssistant) -> None: +async def test_climate_change_thermostat_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit thermostat on and off again.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await 
hass.services.async_call( DOMAIN, @@ -534,9 +564,11 @@ async def test_climate_change_thermostat_humidity(hass: HomeAssistant) -> None: ) -async def test_climate_read_thermostat_state(hass: HomeAssistant) -> None: +async def test_climate_read_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) # Simulate that heating is on await helper.async_update( @@ -591,9 +623,11 @@ async def test_climate_read_thermostat_state(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT_COOL -async def test_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: +async def test_hvac_mode_vs_hvac_action( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we haven't conflated hvac_mode and hvac_action.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) # Simulate that current temperature is above target temp # Heating might be on, but hvac_action currently 'off' @@ -628,9 +662,11 @@ async def test_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: assert state.attributes["hvac_action"] == "heating" -async def test_hvac_mode_vs_hvac_action_current_mode_wrong(hass: HomeAssistant) -> None: +async def test_hvac_mode_vs_hvac_action_current_mode_wrong( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we cope with buggy HEATING_COOLING_CURRENT.""" - helper = await setup_test_component(hass, create_thermostat_service) + helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service) await helper.async_update( ServicesTypes.THERMOSTAT, @@ -692,9 +728,13 @@ def create_heater_cooler_service_min_max(accessory): char.maxValue = 2 -async def test_heater_cooler_respect_supported_op_modes_1(hass: HomeAssistant) -> None: +async def test_heater_cooler_respect_supported_op_modes_1( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects minValue/maxValue hints.""" - helper = await setup_test_component(hass, create_heater_cooler_service_min_max) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service_min_max + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] @@ -707,16 +747,24 @@ def create_theater_cooler_service_valid_vals(accessory): char.valid_values = [1, 2] -async def test_heater_cooler_respect_supported_op_modes_2(hass: HomeAssistant) -> None: +async def test_heater_cooler_respect_supported_op_modes_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that climate respects validValue hints.""" - helper = await setup_test_component(hass, create_theater_cooler_service_valid_vals) + helper = await setup_test_component( + hass, get_next_aid(), create_theater_cooler_service_valid_vals + ) state = await helper.poll_and_get_state() assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -async def test_heater_cooler_change_thermostat_state(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the operational mode.""" - helper = await setup_test_component(hass, 
create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -771,12 +819,16 @@ async def test_heater_cooler_change_thermostat_state(hass: HomeAssistant) -> Non ) -async def test_can_turn_on_after_off(hass: HomeAssistant) -> None: +async def test_can_turn_on_after_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we always force device from inactive to active when setting mode. This is a regression test for #81863. """ - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -806,9 +858,13 @@ async def test_can_turn_on_after_off(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_change_thermostat_temperature(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_thermostat_temperature( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the target temperature.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -849,9 +905,13 @@ async def test_heater_cooler_change_thermostat_temperature(hass: HomeAssistant) ) -async def test_heater_cooler_change_fan_speed(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_fan_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the target fan speed.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -897,9 +957,13 @@ async def test_heater_cooler_change_fan_speed(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_read_fan_speed(hass: HomeAssistant) -> None: +async def test_heater_cooler_read_fan_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that fan speed is off await helper.async_update( @@ -946,9 +1010,13 @@ async def test_heater_cooler_read_fan_speed(hass: HomeAssistant) -> None: assert state.attributes["fan_mode"] == "high" -async def test_heater_cooler_read_thermostat_state(hass: HomeAssistant) -> None: +async def test_heater_cooler_read_thermostat_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit thermostat accessory.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that heating is on await helper.async_update( @@ -1000,9 +1068,13 @@ async def test_heater_cooler_read_thermostat_state(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT_COOL -async def test_heater_cooler_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> None: +async def test_heater_cooler_hvac_mode_vs_hvac_action( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Check that we haven't conflated hvac_mode and 
hvac_action.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that current temperature is above target temp # Heating might be on, but hvac_action currently 'off' @@ -1039,9 +1111,13 @@ async def test_heater_cooler_hvac_mode_vs_hvac_action(hass: HomeAssistant) -> No assert state.attributes["hvac_action"] == "heating" -async def test_heater_cooler_change_swing_mode(hass: HomeAssistant) -> None: +async def test_heater_cooler_change_swing_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can change the swing mode.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) await hass.services.async_call( DOMAIN, @@ -1070,9 +1146,13 @@ async def test_heater_cooler_change_swing_mode(hass: HomeAssistant) -> None: ) -async def test_heater_cooler_turn_off(hass: HomeAssistant) -> None: +async def test_heater_cooler_turn_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that both hvac_action and hvac_mode return "off" when turned off.""" - helper = await setup_test_component(hass, create_heater_cooler_service) + helper = await setup_test_component( + hass, get_next_aid(), create_heater_cooler_service + ) # Simulate that the device is turned off but CURRENT_HEATER_COOLER_STATE still returns HEATING/COOLING await helper.async_update( @@ -1090,7 +1170,9 @@ async def test_heater_cooler_turn_off(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a switch unique id.""" aid = get_next_aid() @@ -1099,7 +1181,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_heater_cooler_service) + await setup_test_component(hass, aid, create_heater_cooler_service) assert ( entity_registry.async_get(climate_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index a336758f4ac..420c9d45803 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -211,13 +211,13 @@ def setup_mock_accessory(controller): bridge = Accessories() accessory = Accessory.create_with_info( + 1, name="Koogeek-LS1-20833F", manufacturer="Koogeek", model="LS1", serial_number="12345", firmware_revision="1.1", ) - accessory.aid = 1 service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 0a77509d675..60ef0b1c547 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -1,8 +1,13 @@ """Tests for HKDevice.""" +from collections.abc import Callable import dataclasses +from unittest import mock from aiohomekit.controller import TransportType +from aiohomekit.model.characteristics import CharacteristicsTypes +from aiohomekit.model.services import ServicesTypes +from aiohomekit.testing import 
FakeController import pytest from homeassistant.components.homekit_controller.const import ( @@ -12,11 +17,17 @@ from homeassistant.components.homekit_controller.const import ( IDENTIFIER_LEGACY_SERIAL_NUMBER, ) from homeassistant.components.thread import async_add_dataset, dataset_store +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from .common import setup_accessories_from_file, setup_platform, setup_test_accessories +from .common import ( + setup_accessories_from_file, + setup_platform, + setup_test_accessories, + setup_test_component, +) from tests.common import MockConfigEntry @@ -331,3 +342,56 @@ async def test_thread_provision_migration_failed(hass: HomeAssistant) -> None: ) assert config_entry.data["Connection"] == "BLE" + + +async def test_skip_polling_all_watchable_accessory_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that we skip polling if available and all chars are watchable accessory mode.""" + + def _create_accessory(accessory): + service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") + + on_char = service.add_char(CharacteristicsTypes.ON) + on_char.value = 0 + + brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) + brightness.value = 0 + + return service + + helper = await setup_test_component(hass, get_next_aid(), _create_accessory) + + with mock.patch.object( + helper.pairing, + "get_characteristics", + wraps=helper.pairing.get_characteristics, + ) as mock_get_characteristics: + # Initial state is that the light is off + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 0 + + # Test device goes offline + helper.pairing.available = False + with mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ): + state = await helper.poll_and_get_state() + assert state.state == STATE_UNAVAILABLE + # Tries twice before declaring unavailable + assert mock_get_characteristics.call_count == 2 + + # Test device comes back online + helper.pairing.available = True + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 3 + + # Next poll should not happen because its a single + # accessory, available, and all chars are watchable + state = await helper.poll_and_get_state() + assert state.state == STATE_OFF + assert mock_get_characteristics.call_count == 3 diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index 2157eb51212..c819eac1f5a 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitalarm_control_panel.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_window_covering_service(accessory): @@ -113,9 +115,13 @@ def create_window_covering_service_with_none_tilt(accessory): tilt_target.maxValue = 0 -async def 
test_change_window_cover_state(hass: HomeAssistant) -> None: +async def test_change_window_cover_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit alarm on and off again.""" - helper = await setup_test_component(hass, create_window_covering_service) + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service + ) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -138,9 +144,13 @@ async def test_change_window_cover_state(hass: HomeAssistant) -> None: ) -async def test_read_window_cover_state(hass: HomeAssistant) -> None: +async def test_read_window_cover_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit alarm accessory.""" - helper = await setup_test_component(hass, create_window_covering_service) + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service + ) await helper.async_update( ServicesTypes.WINDOW_COVERING, @@ -171,10 +181,12 @@ async def test_read_window_cover_state(hass: HomeAssistant) -> None: assert state.attributes["obstruction-detected"] is True -async def test_read_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_horizontal( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt + hass, get_next_aid(), create_window_covering_service_with_h_tilt ) await helper.async_update( @@ -186,10 +198,12 @@ async def test_read_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_horizontal_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 ) await helper.async_update( @@ -201,10 +215,12 @@ async def test_read_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_vertical( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await helper.async_update( @@ -216,10 +232,12 @@ async def test_read_window_cover_tilt_vertical(hass: HomeAssistant) -> None: assert state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_vertical_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is handled correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 ) await helper.async_update( @@ -231,10 +249,12 @@ async def test_read_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: assert 
state.attributes["current_tilt_position"] == 83 -async def test_read_window_cover_tilt_missing_tilt(hass: HomeAssistant) -> None: +async def test_read_window_cover_tilt_missing_tilt( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that missing tilt is handled.""" helper = await setup_test_component( - hass, create_window_covering_service_with_none_tilt + hass, get_next_aid(), create_window_covering_service_with_none_tilt ) await helper.async_update( @@ -246,10 +266,12 @@ async def test_read_window_cover_tilt_missing_tilt(hass: HomeAssistant) -> None: assert state.state != STATE_UNAVAILABLE -async def test_write_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_horizontal( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt + hass, get_next_aid(), create_window_covering_service_with_h_tilt ) await hass.services.async_call( @@ -267,10 +289,12 @@ async def test_write_window_cover_tilt_horizontal(hass: HomeAssistant) -> None: ) -async def test_write_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_horizontal_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that horizontal tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_h_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_h_tilt_2 ) await hass.services.async_call( @@ -288,10 +312,12 @@ async def test_write_window_cover_tilt_horizontal_2(hass: HomeAssistant) -> None ) -async def test_write_window_cover_tilt_vertical(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_vertical( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -309,10 +335,12 @@ async def test_write_window_cover_tilt_vertical(hass: HomeAssistant) -> None: ) -async def test_write_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: +async def test_write_window_cover_tilt_vertical_2( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt_2 + hass, get_next_aid(), create_window_covering_service_with_v_tilt_2 ) await hass.services.async_call( @@ -330,10 +358,12 @@ async def test_write_window_cover_tilt_vertical_2(hass: HomeAssistant) -> None: ) -async def test_window_cover_stop(hass: HomeAssistant) -> None: +async def test_window_cover_stop( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that vertical tilt is written correctly.""" helper = await setup_test_component( - hass, create_window_covering_service_with_v_tilt + hass, get_next_aid(), create_window_covering_service_with_v_tilt ) await hass.services.async_call( @@ -366,9 +396,13 @@ def create_garage_door_opener_service(accessory): return service -async def test_change_door_state(hass: HomeAssistant) -> None: +async def test_change_door_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn open and close a HomeKit garage door.""" - helper = 
await setup_test_component(hass, create_garage_door_opener_service) + helper = await setup_test_component( + hass, get_next_aid(), create_garage_door_opener_service + ) await hass.services.async_call( "cover", "open_cover", {"entity_id": helper.entity_id}, blocking=True @@ -391,9 +425,13 @@ async def test_change_door_state(hass: HomeAssistant) -> None: ) -async def test_read_door_state(hass: HomeAssistant) -> None: +async def test_read_door_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit garage door.""" - helper = await setup_test_component(hass, create_garage_door_opener_service) + helper = await setup_test_component( + hass, get_next_aid(), create_garage_door_opener_service + ) await helper.async_update( ServicesTypes.GARAGE_DOOR_OPENER, @@ -432,7 +470,9 @@ async def test_read_door_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a cover unique id.""" aid = get_next_aid() @@ -441,7 +481,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_garage_door_opener_service) + await setup_test_component(hass, aid, create_garage_door_opener_service) assert ( entity_registry.async_get(cover_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_device_trigger.py b/tests/components/homekit_controller/test_device_trigger.py index 43572f56d50..ecf34868b6c 100644 --- a/tests/components/homekit_controller/test_device_trigger.py +++ b/tests/components/homekit_controller/test_device_trigger.py @@ -1,5 +1,7 @@ """Test homekit_controller stateless triggers.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes import pytest @@ -15,7 +17,7 @@ from homeassistant.setup import async_setup_component from .common import setup_test_component -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -23,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - def create_remote(accessory): """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -88,9 +84,10 @@ async def test_enumerate_remote( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that remote is correctly enumerated.""" - await setup_test_component(hass, create_remote) + await setup_test_component(hass, get_next_aid(), create_remote) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -139,9 +136,10 @@ async def test_enumerate_button( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly 
enumerated.""" - await setup_test_component(hass, create_button) + await setup_test_component(hass, get_next_aid(), create_button) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -189,9 +187,10 @@ async def test_enumerate_doorbell( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that a button is correctly enumerated.""" - await setup_test_component(hass, create_doorbell) + await setup_test_component(hass, get_next_aid(), create_doorbell) bat_sensor = entity_registry.async_get("sensor.testdevice_battery") identify_button = entity_registry.async_get("button.testdevice_identify") @@ -239,10 +238,11 @@ async def test_handle_events( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + get_next_aid: Callable[[], int], + service_calls: list[ServiceCall], ) -> None: """Test that events are handled.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entry = entity_registry.async_get("sensor.testdevice_battery") @@ -303,8 +303,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "device - button1 - single_press - 0" # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -312,7 +312,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for double press helper.pairing.testing.update_named_service( @@ -320,7 +320,7 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure second automation fires for long press helper.pairing.testing.update_named_service( @@ -328,8 +328,8 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -338,6 +338,7 @@ async def test_handle_events( {"entity_id": "automation.long_press"}, blocking=True, ) + assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -345,6 +346,7 @@ async def test_handle_events( {"entity_id": "automation.single_press"}, blocking=True, ) + assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -352,17 +354,18 @@ async def test_handle_events( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 4 async def test_handle_events_late_setup( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + get_next_aid: Callable[[], int], + service_calls: list[ServiceCall], ) -> None: """Test that events are handled when setup happens after startup.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entry = 
entity_registry.async_get("sensor.testdevice_battery") @@ -432,8 +435,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "device - button1 - single_press - 0" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "device - button1 - single_press - 0" # Make sure automation doesn't trigger for a polled None helper.pairing.testing.update_named_service( @@ -441,7 +444,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for long press helper.pairing.testing.update_named_service( @@ -449,7 +452,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure automation doesn't trigger for double press helper.pairing.testing.update_named_service( @@ -457,7 +460,7 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Make sure second automation fires for long press helper.pairing.testing.update_named_service( @@ -465,8 +468,8 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "device - button2 - long_press - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "device - button2 - long_press - 0" # Turn the automations off await hass.services.async_call( @@ -475,6 +478,7 @@ async def test_handle_events_late_setup( {"entity_id": "automation.long_press"}, blocking=True, ) + assert len(service_calls) == 3 await hass.services.async_call( "automation", @@ -482,6 +486,7 @@ async def test_handle_events_late_setup( {"entity_id": "automation.single_press"}, blocking=True, ) + assert len(service_calls) == 4 # Make sure event no longer fires helper.pairing.testing.update_named_service( @@ -489,4 +494,4 @@ async def test_handle_events_late_setup( ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 4 diff --git a/tests/components/homekit_controller/test_event.py b/tests/components/homekit_controller/test_event.py index e139b49982a..99dcf38fafc 100644 --- a/tests/components/homekit_controller/test_event.py +++ b/tests/components/homekit_controller/test_event.py @@ -1,5 +1,7 @@ """Test homekit_controller stateless triggers.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -65,9 +67,13 @@ def create_doorbell(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -async def test_remote(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_remote( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], +) -> None: """Test that remote is supported.""" - helper = await setup_test_component(hass, create_remote) + helper = await setup_test_component(hass, get_next_aid(), create_remote) entities = [ ("event.testdevice_button_1", "Button 1"), @@ -108,9 +114,13 @@ async def test_remote(hass: HomeAssistant, entity_registry: er.EntityRegistry) - assert state.attributes["event_type"] == "long_press" -async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: +async def test_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + 
get_next_aid: Callable[[], int], +) -> None: """Test that a button is correctly enumerated.""" - helper = await setup_test_component(hass, create_button) + helper = await setup_test_component(hass, get_next_aid(), create_button) entity_id = "event.testdevice_button_1" button = entity_registry.async_get(entity_id) @@ -145,10 +155,12 @@ async def test_button(hass: HomeAssistant, entity_registry: er.EntityRegistry) - async def test_doorbell( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that doorbell service is handled.""" - helper = await setup_test_component(hass, create_doorbell) + helper = await setup_test_component(hass, get_next_aid(), create_doorbell) entity_id = "event.testdevice_doorbell" doorbell = entity_registry.async_get(entity_id) diff --git a/tests/components/homekit_controller/test_fan.py b/tests/components/homekit_controller/test_fan.py index 428d3ab7d50..8de447144af 100644 --- a/tests/components/homekit_controller/test_fan.py +++ b/tests/components/homekit_controller/test_fan.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit fans.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_fan_service(accessory): @@ -90,9 +92,11 @@ def create_fanv2_service_without_rotation_speed(accessory): swing_mode.value = 0 -async def test_fan_read_state(hass: HomeAssistant) -> None: +async def test_fan_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ON: False} @@ -105,9 +109,9 @@ async def test_fan_read_state(hass: HomeAssistant) -> None: assert state.state == "on" -async def test_turn_on(hass: HomeAssistant) -> None: +async def test_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await hass.services.async_call( "fan", @@ -152,10 +156,12 @@ async def test_turn_on(hass: HomeAssistant) -> None: ) -async def test_turn_on_off_without_rotation_speed(hass: HomeAssistant) -> None: +async def test_turn_on_off_without_rotation_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a fan on.""" helper = await setup_test_component( - hass, create_fanv2_service_without_rotation_speed + hass, get_next_aid(), create_fanv2_service_without_rotation_speed ) await hass.services.async_call( @@ -185,9 +191,9 @@ async def test_turn_on_off_without_rotation_speed(hass: HomeAssistant) -> None: ) -async def test_turn_off(hass: HomeAssistant) -> None: +async def test_turn_off(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await 
helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -205,9 +211,9 @@ async def test_turn_off(hass: HomeAssistant) -> None: ) -async def test_set_speed(hass: HomeAssistant) -> None: +async def test_set_speed(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -264,9 +270,11 @@ async def test_set_speed(hass: HomeAssistant) -> None: ) -async def test_set_percentage(hass: HomeAssistant) -> None: +async def test_set_percentage( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await helper.async_update(ServicesTypes.FAN, {CharacteristicsTypes.ON: 1}) @@ -297,9 +305,9 @@ async def test_set_percentage(hass: HomeAssistant) -> None: ) -async def test_speed_read(hass: HomeAssistant) -> None: +async def test_speed_read(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, @@ -337,9 +345,11 @@ async def test_speed_read(hass: HomeAssistant) -> None: assert state.attributes["percentage"] == 0 -async def test_set_direction(hass: HomeAssistant) -> None: +async def test_set_direction( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) await hass.services.async_call( "fan", @@ -368,9 +378,11 @@ async def test_set_direction(hass: HomeAssistant) -> None: ) -async def test_direction_read(hass: HomeAssistant) -> None: +async def test_direction_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fan_service) + helper = await setup_test_component(hass, get_next_aid(), create_fan_service) state = await helper.async_update( ServicesTypes.FAN, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -383,9 +395,11 @@ async def test_direction_read(hass: HomeAssistant) -> None: assert state.attributes["direction"] == "reverse" -async def test_fanv2_read_state(hass: HomeAssistant) -> None: +async def test_fanv2_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: False} @@ -398,9 +412,9 @@ async def test_fanv2_read_state(hass: HomeAssistant) -> None: assert state.state == "on" -async def test_v2_turn_on(hass: HomeAssistant) -> None: +async def test_v2_turn_on(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn a fan on.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await 
setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -473,9 +487,11 @@ async def test_v2_turn_on(hass: HomeAssistant) -> None: ) -async def test_v2_turn_off(hass: HomeAssistant) -> None: +async def test_v2_turn_off( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a fan off.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -493,9 +509,11 @@ async def test_v2_turn_off(hass: HomeAssistant) -> None: ) -async def test_v2_set_speed(hass: HomeAssistant) -> None: +async def test_v2_set_speed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -552,9 +570,11 @@ async def test_v2_set_speed(hass: HomeAssistant) -> None: ) -async def test_v2_set_percentage(hass: HomeAssistant) -> None: +async def test_v2_set_percentage( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -585,9 +605,13 @@ async def test_v2_set_percentage(hass: HomeAssistant) -> None: ) -async def test_v2_set_percentage_with_min_step(hass: HomeAssistant) -> None: +async def test_v2_set_percentage_with_min_step( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we set fan speed by percentage.""" - helper = await setup_test_component(hass, create_fanv2_service_with_min_step) + helper = await setup_test_component( + hass, get_next_aid(), create_fanv2_service_with_min_step + ) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -618,9 +642,11 @@ async def test_v2_set_percentage_with_min_step(hass: HomeAssistant) -> None: ) -async def test_v2_speed_read(hass: HomeAssistant) -> None: +async def test_v2_speed_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, @@ -657,9 +683,11 @@ async def test_v2_speed_read(hass: HomeAssistant) -> None: assert state.attributes["percentage"] == 0 -async def test_v2_set_direction(hass: HomeAssistant) -> None: +async def test_v2_set_direction( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set fan spin direction.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -688,9 +716,11 @@ async def test_v2_set_direction(hass: HomeAssistant) -> None: ) -async def test_v2_direction_read(hass: HomeAssistant) -> None: +async def test_v2_direction_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" 
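The unique-id migration tests follow the same shape in every platform file touched by this diff: seed a registry entry with the legacy homekit-<mac>-<aid>-<iid> unique id, set the component up with that same aid, then assert the entry was rewritten to the <mac>_<aid>_<iid> format. A condensed sketch of that pattern is below, reusing this file's fan helpers; the "fan" domain and create_fan_service factory stand in for whichever platform is under test, and the test name is a placeholder.

# Condensed sketch of the migration pattern shown in the hunks above;
# assumes the surrounding test module's setup_test_component and a
# create_*_service factory such as create_fan_service.
from collections.abc import Callable

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .common import setup_test_component


async def test_migrate_unique_id_sketch(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    get_next_aid: Callable[[], int],
) -> None:
    """Seed a legacy unique id, set up the accessory with the same aid, check migration."""
    aid = get_next_aid()
    entry = entity_registry.async_get_or_create(
        "fan",
        "homekit_controller",
        f"homekit-00:00:00:00:00:00-{aid}-8",  # legacy unique_id format
    )
    # Pass the same aid so the accessory created for the test matches the seeded entry.
    await setup_test_component(hass, aid, create_fan_service)

    assert (
        entity_registry.async_get(entry.entity_id).unique_id
        == f"00:00:00:00:00:00_{aid}_8"  # migrated unique_id format
    )
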
- helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.ROTATION_DIRECTION: 0} @@ -703,9 +733,11 @@ async def test_v2_direction_read(hass: HomeAssistant) -> None: assert state.attributes["direction"] == "reverse" -async def test_v2_oscillate(hass: HomeAssistant) -> None: +async def test_v2_oscillate( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can control a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) await hass.services.async_call( "fan", @@ -734,9 +766,11 @@ async def test_v2_oscillate(hass: HomeAssistant) -> None: ) -async def test_v2_oscillate_read(hass: HomeAssistant) -> None: +async def test_v2_oscillate_read( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read a fans oscillation.""" - helper = await setup_test_component(hass, create_fanv2_service) + helper = await setup_test_component(hass, get_next_aid(), create_fanv2_service) state = await helper.async_update( ServicesTypes.FAN_V2, {CharacteristicsTypes.SWING_MODE: 0} @@ -750,11 +784,11 @@ async def test_v2_oscillate_read(hass: HomeAssistant) -> None: async def test_v2_set_percentage_non_standard_rotation_range( - hass: HomeAssistant, + hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: """Test that we set fan speed with a non-standard rotation range.""" helper = await setup_test_component( - hass, create_fanv2_service_non_standard_rotation_range + hass, get_next_aid(), create_fanv2_service_non_standard_rotation_range ) await helper.async_update(ServicesTypes.FAN_V2, {CharacteristicsTypes.ACTIVE: 1}) @@ -813,7 +847,9 @@ async def test_v2_set_percentage_non_standard_rotation_range( async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a fan unique id.""" aid = get_next_aid() @@ -822,7 +858,9 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_fanv2_service_non_standard_rotation_range) + await setup_test_component( + hass, aid, create_fanv2_service_non_standard_rotation_range + ) assert ( entity_registry.async_get(fan_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index 60c74be3949..a031086e93d 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit Humidifier/Dehumidifier.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -7,7 +9,7 @@ from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_humidifier_service(accessory): @@ -64,9 +66,11 @@ def create_dehumidifier_service(accessory): return service -async def 
test_humidifier_active_state(hass: HomeAssistant) -> None: +async def test_humidifier_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit humidifier on and off again.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True @@ -87,9 +91,13 @@ async def test_humidifier_active_state(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_active_state(hass: HomeAssistant) -> None: +async def test_dehumidifier_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit dehumidifier on and off again.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True @@ -110,9 +118,11 @@ async def test_dehumidifier_active_state(hass: HomeAssistant) -> None: ) -async def test_humidifier_read_humidity(hass: HomeAssistant) -> None: +async def test_humidifier_read_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -149,9 +159,13 @@ async def test_humidifier_read_humidity(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_dehumidifier_read_humidity(hass: HomeAssistant) -> None: +async def test_dehumidifier_read_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -186,9 +200,11 @@ async def test_dehumidifier_read_humidity(hass: HomeAssistant) -> None: assert state.attributes["humidity"] == 40 -async def test_humidifier_set_humidity(hass: HomeAssistant) -> None: +async def test_humidifier_set_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, @@ -202,9 +218,13 @@ async def test_humidifier_set_humidity(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_set_humidity(hass: HomeAssistant) -> None: +async def test_dehumidifier_set_humidity( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, @@ -218,9 +238,11 @@ async def test_dehumidifier_set_humidity(hass: HomeAssistant) -> None: ) -async def 
test_humidifier_set_mode(hass: HomeAssistant) -> None: +async def test_humidifier_set_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the mode of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) await hass.services.async_call( DOMAIN, @@ -251,9 +273,13 @@ async def test_humidifier_set_mode(hass: HomeAssistant) -> None: ) -async def test_dehumidifier_set_mode(hass: HomeAssistant) -> None: +async def test_dehumidifier_set_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the mode of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) await hass.services.async_call( DOMAIN, @@ -284,9 +310,11 @@ async def test_dehumidifier_set_mode(hass: HomeAssistant) -> None: ) -async def test_humidifier_read_only_mode(hass: HomeAssistant) -> None: +async def test_humidifier_read_only_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -324,9 +352,13 @@ async def test_humidifier_read_only_mode(hass: HomeAssistant) -> None: assert state.attributes["mode"] == "normal" -async def test_dehumidifier_read_only_mode(hass: HomeAssistant) -> None: +async def test_dehumidifier_read_only_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.poll_and_get_state() assert state.attributes["mode"] == "normal" @@ -364,9 +396,11 @@ async def test_dehumidifier_read_only_mode(hass: HomeAssistant) -> None: assert state.attributes["mode"] == "normal" -async def test_humidifier_target_humidity_modes(hass: HomeAssistant) -> None: +async def test_humidifier_target_humidity_modes( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit humidifier accessory.""" - helper = await setup_test_component(hass, create_humidifier_service) + helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -409,9 +443,13 @@ async def test_humidifier_target_humidity_modes(hass: HomeAssistant) -> None: assert state.attributes["humidity"] == 37 -async def test_dehumidifier_target_humidity_modes(hass: HomeAssistant) -> None: +async def test_dehumidifier_target_humidity_modes( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit dehumidifier accessory.""" - helper = await setup_test_component(hass, create_dehumidifier_service) + helper = await setup_test_component( + hass, get_next_aid(), create_dehumidifier_service + ) state = await helper.async_update( ServicesTypes.HUMIDIFIER_DEHUMIDIFIER, @@ -457,7 +495,9 @@ async def 
test_dehumidifier_target_humidity_modes(hass: HomeAssistant) -> None: async def test_migrate_entity_ids( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test that we can migrate humidifier entity ids.""" aid = get_next_aid() @@ -467,7 +507,7 @@ async def test_migrate_entity_ids( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_humidifier_service) + await setup_test_component(hass, aid, create_humidifier_service) assert ( entity_registry.async_get(humidifier_entry.entity_id).unique_id == f"00:00:00:00:00:00_{aid}_8" diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index 542d87d0b0e..c443e56b3a4 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -1,5 +1,6 @@ """Tests for homekit_controller init.""" +from collections.abc import Callable from datetime import timedelta import pathlib from unittest.mock import patch @@ -46,9 +47,11 @@ def create_motion_sensor_service(accessory): cur_state.value = 0 -async def test_unload_on_stop(hass: HomeAssistant) -> None: +async def test_unload_on_stop( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test async_unload is called on stop.""" - await setup_test_component(hass, create_motion_sensor_service) + await setup_test_component(hass, get_next_aid(), create_motion_sensor_service) with patch( "homeassistant.components.homekit_controller.HKDevice.async_unload" ) as async_unlock_mock: @@ -58,9 +61,13 @@ async def test_unload_on_stop(hass: HomeAssistant) -> None: assert async_unlock_mock.called -async def test_async_remove_entry(hass: HomeAssistant) -> None: +async def test_async_remove_entry( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test unpairing a component.""" - helper = await setup_test_component(hass, create_motion_sensor_service) + helper = await setup_test_component( + hass, get_next_aid(), create_motion_sensor_service + ) controller = helper.pairing.controller hkid = "00:00:00:00:00:00" @@ -88,10 +95,13 @@ async def test_device_remove_devices( device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, hass_ws_client: WebSocketGenerator, + get_next_aid: Callable[[], int], ) -> None: """Test we can only remove a device that no longer exists.""" assert await async_setup_component(hass, "config", {}) - helper: Helper = await setup_test_component(hass, create_alive_service) + helper: Helper = await setup_test_component( + hass, get_next_aid(), create_alive_service + ) config_entry = helper.config_entry entry_id = config_entry.entry_id @@ -110,10 +120,13 @@ async def test_device_remove_devices( assert response["success"] -async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: +async def test_offline_device_raises( + hass: HomeAssistant, get_next_aid: Callable[[], int], controller +) -> None: """Test an offline device raises ConfigEntryNotReady.""" is_connected = False + aid = get_next_aid() class OfflineFakePairing(FakePairing): """Fake pairing that can flip is_connected.""" @@ -140,7 +153,7 @@ async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: return {} accessory = Accessory.create_with_info( - "TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ 
-162,11 +175,12 @@ async def test_offline_device_raises(hass: HomeAssistant, controller) -> None: async def test_ble_device_only_checks_is_available( - hass: HomeAssistant, controller + hass: HomeAssistant, get_next_aid: Callable[[], int], controller ) -> None: """Test a BLE device only checks is_available.""" is_available = False + aid = get_next_aid() class FakeBLEPairing(FakePairing): """Fake BLE pairing that can flip is_available.""" @@ -197,7 +211,7 @@ async def test_ble_device_only_checks_is_available( return {} accessory = Accessory.create_with_info( - "TestDevice", "example.com", "Test", "0001", "0.1" + aid, "TestDevice", "example.com", "Test", "0001", "0.1" ) create_alive_service(accessory) @@ -273,12 +287,16 @@ async def test_snapshots( entry = asdict(entity_entry) entry.pop("id", None) entry.pop("device_id", None) + entry.pop("created_at", None) + entry.pop("modified_at", None) entities.append({"entry": entry, "state": state_dict}) device_dict = asdict(device) device_dict.pop("id", None) device_dict.pop("via_device_id", None) + device_dict.pop("created_at", None) + device_dict.pop("modified_at", None) devices.append({"device": device_dict, "entities": entities}) assert snapshot == devices diff --git a/tests/components/homekit_controller/test_light.py b/tests/components/homekit_controller/test_light.py index c2644735ecb..04f4d3f5e29 100644 --- a/tests/components/homekit_controller/test_light.py +++ b/tests/components/homekit_controller/test_light.py @@ -1,7 +1,11 @@ """Basic checks for HomeKitSwitch.""" +from collections.abc import Callable +from unittest import mock + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes +from aiohomekit.testing import FakeController from homeassistant.components.homekit_controller.const import KNOWN_DEVICES from homeassistant.components.light import ( @@ -13,7 +17,7 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component LIGHT_BULB_NAME = "TestDevice" LIGHT_BULB_ENTITY_ID = "light.testdevice" @@ -55,9 +59,13 @@ def create_lightbulb_service_with_color_temp(accessory): return service -async def test_switch_change_light_state(hass: HomeAssistant) -> None: +async def test_switch_change_light_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit light on and off again.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) await hass.services.async_call( "light", @@ -102,9 +110,13 @@ async def test_switch_change_light_state(hass: HomeAssistant) -> None: ) -async def test_switch_change_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_change_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn change color_temp.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) await hass.services.async_call( "light", @@ -122,9 +134,11 @@ async def test_switch_change_light_state_color_temp(hass: HomeAssistant) -> None ) -async def test_switch_read_light_state_dimmer(hass: 
HomeAssistant) -> None: +async def test_switch_read_light_state_dimmer( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service) + helper = await setup_test_component(hass, get_next_aid(), create_lightbulb_service) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -157,9 +171,11 @@ async def test_switch_read_light_state_dimmer(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_push_light_state_dimmer(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_dimmer( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service) + helper = await setup_test_component(hass, get_next_aid(), create_lightbulb_service) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -185,9 +201,13 @@ async def test_switch_push_light_state_dimmer(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_read_light_state_hs(hass: HomeAssistant) -> None: +async def test_switch_read_light_state_hs( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -248,9 +268,13 @@ async def test_switch_read_light_state_hs(hass: HomeAssistant) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_hs(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_hs( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_hs) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_hs + ) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -279,9 +303,13 @@ async def test_switch_push_light_state_hs(hass: HomeAssistant) -> None: assert state.state == "off" -async def test_switch_read_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_read_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the color_temp of a light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -307,9 +335,13 @@ async def test_switch_read_light_state_color_temp(hass: HomeAssistant) -> None: assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 -async def test_switch_push_light_state_color_temp(hass: HomeAssistant) -> None: +async def test_switch_push_light_state_color_temp( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit light accessory.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + 
helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = hass.states.get(LIGHT_BULB_ENTITY_ID) @@ -328,9 +360,13 @@ async def test_switch_push_light_state_color_temp(hass: HomeAssistant) -> None: assert state.attributes["color_temp"] == 400 -async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> None: +async def test_light_becomes_unavailable_but_recovers( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test transition to and from unavailable state.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -338,7 +374,12 @@ async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> No # Test device goes offline helper.pairing.available = False - state = await helper.poll_and_get_state() + with mock.patch.object( + FakeController, + "async_reachable", + return_value=False, + ): + state = await helper.poll_and_get_state() assert state.state == "unavailable" # Simulate that someone switched on the device in the real world not via HA @@ -356,9 +397,13 @@ async def test_light_becomes_unavailable_but_recovers(hass: HomeAssistant) -> No assert state.attributes["color_temp"] == 400 -async def test_light_unloaded_removed(hass: HomeAssistant) -> None: +async def test_light_unloaded_removed( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test entity and HKDevice are correctly unloaded and removed.""" - helper = await setup_test_component(hass, create_lightbulb_service_with_color_temp) + helper = await setup_test_component( + hass, get_next_aid(), create_lightbulb_service_with_color_temp + ) # Initial state is that the light is off state = await helper.poll_and_get_state() @@ -382,7 +427,9 @@ async def test_light_unloaded_removed(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a light unique id.""" aid = get_next_aid() @@ -391,7 +438,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_lightbulb_service_with_color_temp) + await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(light_entry.entity_id).unique_id @@ -400,7 +447,9 @@ async def test_migrate_unique_id( async def test_only_migrate_once( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we handle migration happening after an upgrade and than a downgrade and then an upgrade.""" aid = get_next_aid() @@ -414,7 +463,7 @@ async def test_only_migrate_once( "homekit_controller", f"00:00:00:00:00:00_{aid}_8", ) - await setup_test_component(hass, create_lightbulb_service_with_color_temp) + await setup_test_component(hass, aid, create_lightbulb_service_with_color_temp) assert ( entity_registry.async_get(old_light_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_lock.py b/tests/components/homekit_controller/test_lock.py index db248b82b1a..e56ca5fcffe 
100644 --- a/tests/components/homekit_controller/test_lock.py +++ b/tests/components/homekit_controller/test_lock.py @@ -1,12 +1,14 @@ """Basic checks for HomeKitLock.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_lock_service(accessory): @@ -29,9 +31,11 @@ def create_lock_service(accessory): return service -async def test_switch_change_lock_state(hass: HomeAssistant) -> None: +async def test_switch_change_lock_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit lock on and off again.""" - helper = await setup_test_component(hass, create_lock_service) + helper = await setup_test_component(hass, get_next_aid(), create_lock_service) await hass.services.async_call( "lock", "lock", {"entity_id": "lock.testdevice"}, blocking=True @@ -54,9 +58,11 @@ async def test_switch_change_lock_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_lock_state(hass: HomeAssistant) -> None: +async def test_switch_read_lock_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit lock accessory.""" - helper = await setup_test_component(hass, create_lock_service) + helper = await setup_test_component(hass, get_next_aid(), create_lock_service) state = await helper.async_update( ServicesTypes.LOCK_MECHANISM, @@ -119,7 +125,9 @@ async def test_switch_read_lock_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a lock unique id.""" aid = get_next_aid() @@ -128,7 +136,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_lock_service) + await setup_test_component(hass, aid, create_lock_service) assert ( entity_registry.async_get(lock_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index 62a042ff7b9..a7f900217d7 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit motion sensors and contact sensors.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( CharacteristicPermissions, CharacteristicsTypes, @@ -10,7 +12,7 @@ import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_tv_service(accessory): @@ -62,9 +64,11 @@ def create_tv_service_with_target_media_state(accessory): return service -async def test_tv_read_state(hass: HomeAssistant) -> None: +async def test_tv_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit fan accessory.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) 
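# --- Note on the test_migrate_unique_id variants (fan, light, humidifier, lock
# above; media_player, number, select, switch below) ---
# These tests call the fixture exactly once and share that single aid between the
# pre-seeded registry entry and setup_test_component, so the accessory created by
# the test really carries the legacy unique id that is being migrated. With the
# imports and helpers already present in test_lock.py, the migrated test reads
# roughly as follows (sketch; the asserted new-format id mirrors the humidifier
# and light hunks earlier in this diff):
async def test_migrate_unique_id(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    get_next_aid: Callable[[], int],
) -> None:
    """Reuse one aid for both the legacy registry entry and the accessory."""
    aid = get_next_aid()
    lock_entry = entity_registry.async_get_or_create(
        "lock",
        "homekit_controller",
        f"homekit-00:00:00:00:00:00-{aid}-8",  # legacy unique id format
    )
    await setup_test_component(hass, aid, create_lock_service)
    # setting up the component should migrate the entry to the new format
    assert (
        entity_registry.async_get(lock_entry.entity_id).unique_id
        == f"00:00:00:00:00:00_{aid}_8"
    )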
state = await helper.async_update( ServicesTypes.TELEVISION, @@ -91,18 +95,22 @@ async def test_tv_read_state(hass: HomeAssistant) -> None: assert state.state == "idle" -async def test_tv_read_sources(hass: HomeAssistant) -> None: +async def test_tv_read_sources( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) state = await helper.poll_and_get_state() assert state.attributes["source"] == "HDMI 1" assert state.attributes["source_list"] == ["HDMI 1", "HDMI 2"] -async def test_play_remote_key(hass: HomeAssistant) -> None: +async def test_play_remote_key( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await helper.async_update( ServicesTypes.TELEVISION, @@ -147,9 +155,11 @@ async def test_play_remote_key(hass: HomeAssistant) -> None: ) -async def test_pause_remote_key(hass: HomeAssistant) -> None: +async def test_pause_remote_key( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can pause a media player.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await helper.async_update( ServicesTypes.TELEVISION, @@ -194,9 +204,11 @@ async def test_pause_remote_key(hass: HomeAssistant) -> None: ) -async def test_play(hass: HomeAssistant) -> None: +async def test_play(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can play media on a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await helper.async_update( ServicesTypes.TELEVISION, @@ -243,9 +255,11 @@ async def test_play(hass: HomeAssistant) -> None: ) -async def test_pause(hass: HomeAssistant) -> None: +async def test_pause(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can turn pause a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await helper.async_update( ServicesTypes.TELEVISION, @@ -291,9 +305,11 @@ async def test_pause(hass: HomeAssistant) -> None: ) -async def test_stop(hass: HomeAssistant) -> None: +async def test_stop(hass: HomeAssistant, get_next_aid: Callable[[], int]) -> None: """Test that we can stop a media player.""" - helper = await setup_test_component(hass, create_tv_service_with_target_media_state) + helper = await setup_test_component( + hass, get_next_aid(), create_tv_service_with_target_media_state + ) await hass.services.async_call( "media_player", @@ -332,9 +348,11 @@ async def test_stop(hass: HomeAssistant) -> None: ) -async def test_tv_set_source(hass: HomeAssistant) -> None: +async def test_tv_set_source( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) await 
hass.services.async_call( "media_player", @@ -353,9 +371,11 @@ async def test_tv_set_source(hass: HomeAssistant) -> None: assert state.attributes["source"] == "HDMI 2" -async def test_tv_set_source_fail(hass: HomeAssistant) -> None: +async def test_tv_set_source_fail( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can set the input source of a HomeKit TV.""" - helper = await setup_test_component(hass, create_tv_service) + helper = await setup_test_component(hass, get_next_aid(), create_tv_service) with pytest.raises(ValueError): await hass.services.async_call( @@ -370,7 +390,9 @@ async def test_tv_set_source_fail(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a media_player unique id.""" aid = get_next_aid() @@ -379,7 +401,7 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-00:00:00:00:00:00-{aid}-8", ) - await setup_test_component(hass, create_tv_service_with_target_media_state) + await setup_test_component(hass, aid, create_tv_service_with_target_media_state) assert ( entity_registry.async_get(media_player_entry.entity_id).unique_id diff --git a/tests/components/homekit_controller/test_number.py b/tests/components/homekit_controller/test_number.py index 96e2cbe8d4d..fcbcc3ca7a8 100644 --- a/tests/components/homekit_controller/test_number.py +++ b/tests/components/homekit_controller/test_number.py @@ -1,12 +1,14 @@ """Basic checks for HomeKit sensor.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, get_next_aid, setup_test_component +from .common import Helper, setup_test_component def create_switch_with_spray_level(accessory): @@ -31,7 +33,9 @@ def create_switch_with_spray_level(accessory): async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a number unique id.""" aid = get_next_aid() @@ -41,7 +45,7 @@ async def test_migrate_unique_id( f"homekit-0001-aid:{aid}-sid:8-cid:9", suggested_object_id="testdevice_spray_quantity", ) - await setup_test_component(hass, create_switch_with_spray_level) + await setup_test_component(hass, aid, create_switch_with_spray_level) assert ( entity_registry.async_get(number.entity_id).unique_id @@ -49,9 +53,13 @@ async def test_migrate_unique_id( ) -async def test_read_number(hass: HomeAssistant) -> None: +async def test_read_number( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_spray_level) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_spray_level + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. 
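# setup_test_component returns a helper bound only to the accessory's primary
# entity (the outlet here), so tests that also exercise a characteristic-backed
# entity, like this spray-level number or the energy sensor and Ecobee-mode
# select later in the diff, build a second Helper by hand. The refactor only
# threads the new aid into setup_test_component; those secondary Helper
# constructions are untouched. For reference, such a helper is typically built
# from the attributes of the returned primary helper, roughly (argument order
# assumed from existing common.Helper usage, entity id illustrative):
#
#     spray_level = Helper(
#         hass,
#         "number.testdevice_spray_quantity",
#         helper.pairing,
#         helper.accessory,
#         helper.config_entry,
#     )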
spray_level = Helper( @@ -75,9 +83,13 @@ async def test_read_number(hass: HomeAssistant) -> None: assert state.state == "5" -async def test_write_number(hass: HomeAssistant) -> None: +async def test_write_number( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_spray_level) + helper = await setup_test_component( + hass, get_next_aid(), create_switch_with_spray_level + ) # Helper will be for the primary entity, which is the outlet. Make a helper for the sensor. spray_level = Helper( diff --git a/tests/components/homekit_controller/test_select.py b/tests/components/homekit_controller/test_select.py index b00206e1b0d..cd9357b78d9 100644 --- a/tests/components/homekit_controller/test_select.py +++ b/tests/components/homekit_controller/test_select.py @@ -1,5 +1,7 @@ """Basic checks for HomeKit select entities.""" +from collections.abc import Callable + from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import TemperatureDisplayUnits @@ -8,7 +10,7 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import Helper, get_next_aid, setup_test_component +from .common import Helper, setup_test_component def create_service_with_ecobee_mode(accessory: Accessory): @@ -35,7 +37,9 @@ def create_service_with_temperature_units(accessory: Accessory): async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test we can migrate a select unique id.""" aid = get_next_aid() @@ -46,7 +50,7 @@ async def test_migrate_unique_id( suggested_object_id="testdevice_current_mode", ) - await setup_test_component(hass, create_service_with_ecobee_mode) + await setup_test_component(hass, aid, create_service_with_ecobee_mode) assert ( entity_registry.async_get(select.entity_id).unique_id @@ -54,9 +58,13 @@ async def test_migrate_unique_id( ) -async def test_read_current_mode(hass: HomeAssistant) -> None: +async def test_read_current_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that Ecobee mode can be correctly read and show as human readable text.""" - helper = await setup_test_component(hass, create_service_with_ecobee_mode) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_ecobee_mode + ) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. ecobee_mode = Helper( @@ -92,9 +100,13 @@ async def test_read_current_mode(hass: HomeAssistant) -> None: assert state.state == "away" -async def test_write_current_mode(hass: HomeAssistant) -> None: +async def test_write_current_mode( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test can set a specific mode.""" - helper = await setup_test_component(hass, create_service_with_ecobee_mode) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_ecobee_mode + ) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. 
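# One difference in the number/select migration tests compared with the
# fan/light/lock ones above: the legacy unique id being migrated is the
# characteristic-level form homekit-0001-aid:{aid}-sid:8-cid:9 rather than the
# service-level homekit-00:00:00:00:00:00-{aid}-8. The requirement is the same
# either way: the aid embedded in the pre-created registry entry must be the
# one handed to setup_test_component, which is why those tests store
# get_next_aid() in a local variable instead of calling it inline.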
@@ -140,9 +152,13 @@ async def test_write_current_mode(hass: HomeAssistant) -> None: ) -async def test_read_select(hass: HomeAssistant) -> None: +async def test_read_select( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test the generic select can read the current value.""" - helper = await setup_test_component(hass, create_service_with_temperature_units) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_temperature_units + ) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. select_entity = Helper( @@ -170,9 +186,13 @@ async def test_read_select(hass: HomeAssistant) -> None: assert state.state == "fahrenheit" -async def test_write_select(hass: HomeAssistant) -> None: +async def test_write_select( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test can set a value.""" - helper = await setup_test_component(hass, create_service_with_temperature_units) + helper = await setup_test_component( + hass, get_next_aid(), create_service_with_temperature_units + ) helper.accessory.services.first(service_type=ServicesTypes.THERMOSTAT) # Helper will be for the primary entity, which is the service. Make a helper for the sensor. diff --git a/tests/components/homekit_controller/test_sensor.py b/tests/components/homekit_controller/test_sensor.py index 461d62742a5..ad896395e75 100644 --- a/tests/components/homekit_controller/test_sensor.py +++ b/tests/components/homekit_controller/test_sensor.py @@ -1,5 +1,6 @@ """Basic checks for HomeKit sensor.""" +from collections.abc import Callable from unittest.mock import patch from aiohomekit.model import Transport @@ -71,10 +72,12 @@ def create_battery_level_sensor(accessory): return service -async def test_temperature_sensor_read_state(hass: HomeAssistant) -> None: +async def test_temperature_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, create_temperature_sensor_service, suffix="temperature" + hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" ) state = await helper.async_update( @@ -97,10 +100,12 @@ async def test_temperature_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["state_class"] == SensorStateClass.MEASUREMENT -async def test_temperature_sensor_not_added_twice(hass: HomeAssistant) -> None: +async def test_temperature_sensor_not_added_twice( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """A standalone temperature sensor should not get a characteristic AND a service entity.""" helper = await setup_test_component( - hass, create_temperature_sensor_service, suffix="temperature" + hass, get_next_aid(), create_temperature_sensor_service, suffix="temperature" ) created_sensors = set() @@ -111,10 +116,12 @@ async def test_temperature_sensor_not_added_twice(hass: HomeAssistant) -> None: assert created_sensors == {helper.entity_id} -async def test_humidity_sensor_read_state(hass: HomeAssistant) -> None: +async def test_humidity_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit humidity sensor accessory.""" helper = await setup_test_component( - hass, create_humidity_sensor_service, suffix="humidity" + hass, get_next_aid(), create_humidity_sensor_service, suffix="humidity" ) state = await helper.async_update( @@ -136,10 +143,12 @@ async def 
test_humidity_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.HUMIDITY -async def test_light_level_sensor_read_state(hass: HomeAssistant) -> None: +async def test_light_level_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit temperature sensor accessory.""" helper = await setup_test_component( - hass, create_light_level_sensor_service, suffix="light_level" + hass, get_next_aid(), create_light_level_sensor_service, suffix="light_level" ) state = await helper.async_update( @@ -161,10 +170,15 @@ async def test_light_level_sensor_read_state(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.ILLUMINANCE -async def test_carbon_dioxide_level_sensor_read_state(hass: HomeAssistant) -> None: +async def test_carbon_dioxide_level_sensor_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit carbon dioxide sensor accessory.""" helper = await setup_test_component( - hass, create_carbon_dioxide_level_sensor_service, suffix="carbon_dioxide" + hass, + get_next_aid(), + create_carbon_dioxide_level_sensor_service, + suffix="carbon_dioxide", ) state = await helper.async_update( @@ -184,10 +198,12 @@ async def test_carbon_dioxide_level_sensor_read_state(hass: HomeAssistant) -> No assert state.state == "20" -async def test_battery_level_sensor(hass: HomeAssistant) -> None: +async def test_battery_level_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery level sensor.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -211,10 +227,12 @@ async def test_battery_level_sensor(hass: HomeAssistant) -> None: assert state.attributes["device_class"] == SensorDeviceClass.BATTERY -async def test_battery_charging(hass: HomeAssistant) -> None: +async def test_battery_charging( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery's charging state.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -235,10 +253,12 @@ async def test_battery_charging(hass: HomeAssistant) -> None: assert state.attributes["icon"] == "mdi:battery-charging-20" -async def test_battery_low(hass: HomeAssistant) -> None: +async def test_battery_low( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test reading the state of a HomeKit battery's low state.""" helper = await setup_test_component( - hass, create_battery_level_sensor, suffix="battery" + hass, get_next_aid(), create_battery_level_sensor, suffix="battery" ) state = await helper.async_update( @@ -277,9 +297,11 @@ def create_switch_with_sensor(accessory): return service -async def test_switch_with_sensor(hass: HomeAssistant) -> None: +async def test_switch_with_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a switch service that has a sensor characteristic is correctly handled.""" - helper = await setup_test_component(hass, create_switch_with_sensor) + helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) # Helper will be for the primary entity, 
which is the outlet. Make a helper for the sensor. energy_helper = Helper( @@ -307,9 +329,11 @@ async def test_switch_with_sensor(hass: HomeAssistant) -> None: assert state.state == "50" -async def test_sensor_unavailable(hass: HomeAssistant) -> None: +async def test_sensor_unavailable( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test a sensor becoming unavailable.""" - helper = await setup_test_component(hass, create_switch_with_sensor) + helper = await setup_test_component(hass, get_next_aid(), create_switch_with_sensor) outlet = helper.accessory.services.first(service_type=ServicesTypes.OUTLET) on_char = outlet[CharacteristicsTypes.ON] @@ -383,7 +407,9 @@ def test_thread_status_to_str() -> None: @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") -async def test_rssi_sensor(hass: HomeAssistant) -> None: +async def test_rssi_sensor( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test an rssi sensor.""" inject_bluetooth_service_info(hass, TEST_DEVICE_SERVICE_INFO) @@ -398,14 +424,20 @@ async def test_rssi_sensor(hass: HomeAssistant) -> None: # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, create_battery_level_sensor, suffix="battery", connection="BLE" + hass, + get_next_aid(), + create_battery_level_sensor, + suffix="battery", + connection="BLE", ) assert hass.states.get("sensor.testdevice_signal_strength").state == "-56" @pytest.mark.usefixtures("enable_bluetooth", "entity_registry_enabled_by_default") async def test_migrate_rssi_sensor_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test an rssi sensor unique id migration.""" rssi_sensor = entity_registry.async_get_or_create( @@ -428,7 +460,11 @@ async def test_migrate_rssi_sensor_unique_id( # Any accessory will do for this test, but we need at least # one or the rssi sensor will not be created await setup_test_component( - hass, create_battery_level_sensor, suffix="battery", connection="BLE" + hass, + get_next_aid(), + create_battery_level_sensor, + suffix="battery", + connection="BLE", ) assert hass.states.get("sensor.renamed_rssi").state == "-56" diff --git a/tests/components/homekit_controller/test_storage.py b/tests/components/homekit_controller/test_storage.py index 9523dc9abb7..ab7d7afd6fe 100644 --- a/tests/components/homekit_controller/test_storage.py +++ b/tests/components/homekit_controller/test_storage.py @@ -1,5 +1,6 @@ """Basic checks for entity map storage.""" +from collections.abc import Callable from typing import Any from aiohomekit.model.characteristics import CharacteristicsTypes @@ -72,10 +73,10 @@ def create_lightbulb_service(accessory): async def test_storage_is_updated_on_add( - hass: HomeAssistant, hass_storage: dict[str, Any] + hass: HomeAssistant, hass_storage: dict[str, Any], get_next_aid: Callable[[], int] ) -> None: """Test entity map storage is cleaned up on adding an accessory.""" - await setup_test_component(hass, create_lightbulb_service) + await setup_test_component(hass, get_next_aid(), create_lightbulb_service) entity_map: EntityMapStorage = hass.data[ENTITY_MAP] hkid = "00:00:00:00:00:00" diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index 8a6b2a65e88..1fc49c5c636 100644 --- a/tests/components/homekit_controller/test_switch.py +++ 
b/tests/components/homekit_controller/test_switch.py @@ -1,5 +1,7 @@ """Basic checks for HomeKitSwitch.""" +from collections.abc import Callable + from aiohomekit.model.characteristics import ( CharacteristicsTypes, InUseValues, @@ -10,7 +12,7 @@ from aiohomekit.model.services import ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import get_next_aid, setup_test_component +from .common import setup_test_component def create_switch_service(accessory): @@ -50,9 +52,11 @@ def create_char_switch_service(accessory): on_char.value = False -async def test_switch_change_outlet_state(hass: HomeAssistant) -> None: +async def test_switch_change_outlet_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a HomeKit outlet on and off again.""" - helper = await setup_test_component(hass, create_switch_service) + helper = await setup_test_component(hass, get_next_aid(), create_switch_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -75,9 +79,11 @@ async def test_switch_change_outlet_state(hass: HomeAssistant) -> None: ) -async def test_switch_read_outlet_state(hass: HomeAssistant) -> None: +async def test_switch_read_outlet_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit outlet accessory.""" - helper = await setup_test_component(hass, create_switch_service) + helper = await setup_test_component(hass, get_next_aid(), create_switch_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -108,9 +114,11 @@ async def test_switch_read_outlet_state(hass: HomeAssistant) -> None: assert switch_1.attributes["outlet_in_use"] is True -async def test_valve_change_active_state(hass: HomeAssistant) -> None: +async def test_valve_change_active_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a valve on and off again.""" - helper = await setup_test_component(hass, create_valve_service) + helper = await setup_test_component(hass, get_next_aid(), create_valve_service) await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.testdevice"}, blocking=True @@ -133,9 +141,11 @@ async def test_valve_change_active_state(hass: HomeAssistant) -> None: ) -async def test_valve_read_state(hass: HomeAssistant) -> None: +async def test_valve_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a valve accessory.""" - helper = await setup_test_component(hass, create_valve_service) + helper = await setup_test_component(hass, get_next_aid(), create_valve_service) # Initial state is that the switch is off and the outlet isn't in use switch_1 = await helper.poll_and_get_state() @@ -166,10 +176,12 @@ async def test_valve_read_state(hass: HomeAssistant) -> None: assert switch_1.attributes["in_use"] is False -async def test_char_switch_change_state(hass: HomeAssistant) -> None: +async def test_char_switch_change_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can turn a characteristic on and off again.""" helper = await setup_test_component( - hass, create_char_switch_service, suffix="pairing_mode" + hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" ) await hass.services.async_call( @@ -199,10 +211,12 @@ async def 
test_char_switch_change_state(hass: HomeAssistant) -> None: ) -async def test_char_switch_read_state(hass: HomeAssistant) -> None: +async def test_char_switch_read_state( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: """Test that we can read the state of a HomeKit characteristic switch.""" helper = await setup_test_component( - hass, create_char_switch_service, suffix="pairing_mode" + hass, get_next_aid(), create_char_switch_service, suffix="pairing_mode" ) # Simulate that someone switched on the device in the real world not via HA @@ -221,7 +235,9 @@ async def test_char_switch_read_state(hass: HomeAssistant) -> None: async def test_migrate_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + get_next_aid: Callable[[], int], ) -> None: """Test a we can migrate a switch unique id.""" aid = get_next_aid() @@ -235,7 +251,9 @@ async def test_migrate_unique_id( "homekit_controller", f"homekit-0001-aid:{aid}-sid:8-cid:9", ) - await setup_test_component(hass, create_char_switch_service, suffix="pairing_mode") + await setup_test_component( + hass, aid, create_char_switch_service, suffix="pairing_mode" + ) assert ( entity_registry.async_get(switch_entry.entity_id).unique_id diff --git a/tests/components/homewizard/conftest.py b/tests/components/homewizard/conftest.py index eb638492941..fcfe1e5c189 100644 --- a/tests/components/homewizard/conftest.py +++ b/tests/components/homewizard/conftest.py @@ -1,11 +1,11 @@ """Fixtures for HomeWizard integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from homewizard_energy.errors import NotFoundError from homewizard_energy.models import Data, Device, State, System import pytest -from typing_extensions import Generator from homeassistant.components.homewizard.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS diff --git a/tests/components/homewizard/snapshots/test_button.ambr b/tests/components/homewizard/snapshots/test_button.ambr index eabaeb648aa..d5ad9770478 100644 --- a/tests/components/homewizard/snapshots/test_button.ambr +++ b/tests/components/homewizard/snapshots/test_button.ambr @@ -71,7 +71,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_diagnostics.ambr b/tests/components/homewizard/snapshots/test_diagnostics.ambr index 7b82056aacb..f8ac80f2536 100644 --- a/tests/components/homewizard/snapshots/test_diagnostics.ambr +++ b/tests/components/homewizard/snapshots/test_diagnostics.ambr @@ -65,6 +65,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'HWE-KWH1', + 'name': 'Wi-Fi kWh Meter 1-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'kWh meter', 'product_type': 'HWE-KWH1', 'serial': '**REDACTED**', @@ -148,6 +154,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'HWE-KWH3', + 'name': 'Wi-Fi kWh Meter 3-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'KWh meter 3-phase', 'product_type': 'HWE-KWH3', 'serial': '**REDACTED**', @@ -282,6 +294,12 @@ 'device': dict({ 'api_version': 'v1', 
'firmware_version': '4.19', + 'product': dict({ + 'description': 'The HomeWizard P1 Meter gives you detailed insight in your electricity-, gas consumption and solar surplus.', + 'model': 'HWE-P1', + 'name': 'Wi-Fi P1 Meter', + 'url': 'https://www.homewizard.com/p1-meter/', + }), 'product_name': 'P1 meter', 'product_type': 'HWE-P1', 'serial': '**REDACTED**', @@ -365,6 +383,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.03', + 'product': dict({ + 'description': 'Measure and switch every device.', + 'model': 'HWE-SKT', + 'name': 'Wi-Fi Energy Socket', + 'url': 'https://www.homewizard.com/energy-socket/', + }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -452,6 +476,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '4.07', + 'product': dict({ + 'description': 'Measure and switch every device.', + 'model': 'HWE-SKT', + 'name': 'Wi-Fi Energy Socket', + 'url': 'https://www.homewizard.com/energy-socket/', + }), 'product_name': 'Energy Socket', 'product_type': 'HWE-SKT', 'serial': '**REDACTED**', @@ -539,6 +569,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '2.03', + 'product': dict({ + 'description': 'Real-time water consumption insights', + 'model': 'HWE-WTR', + 'name': 'Wi-Fi Watermeter', + 'url': 'https://www.homewizard.com/watermeter/', + }), 'product_name': 'Watermeter', 'product_type': 'HWE-WTR', 'serial': '**REDACTED**', @@ -622,6 +658,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'SDM230-wifi', + 'name': 'Wi-Fi kWh Meter 1-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'kWh meter', 'product_type': 'SDM230-wifi', 'serial': '**REDACTED**', @@ -705,6 +747,12 @@ 'device': dict({ 'api_version': 'v1', 'firmware_version': '3.06', + 'product': dict({ + 'description': 'Measure solar panels, car chargers and more.', + 'model': 'SDM630-wifi', + 'name': 'Wi-Fi kWh Meter 3-phase', + 'url': 'https://www.homewizard.com/kwh-meter/', + }), 'product_name': 'KWh meter 3-phase', 'product_type': 'SDM630-wifi', 'serial': '**REDACTED**', diff --git a/tests/components/homewizard/snapshots/test_number.ambr b/tests/components/homewizard/snapshots/test_number.ambr index f292847f2a2..768255c7508 100644 --- a/tests/components/homewizard/snapshots/test_number.ambr +++ b/tests/components/homewizard/snapshots/test_number.ambr @@ -80,7 +80,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -171,7 +172,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 27dfd6399c7..63ee9312a13 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -57,7 +57,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -143,7 +144,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', 
+ 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -229,7 +231,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -315,7 +318,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -401,7 +405,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -487,7 +492,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -576,7 +582,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -662,7 +669,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -748,7 +756,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -834,7 +843,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -915,7 +925,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1000,7 +1011,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1086,7 +1098,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1172,7 +1185,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1258,7 +1272,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1344,7 +1359,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1430,7 +1446,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1516,7 +1533,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 
'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1602,7 +1620,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1688,7 +1707,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1774,7 +1794,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1860,7 +1881,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -1946,7 +1968,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2035,7 +2058,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2121,7 +2145,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2207,7 +2232,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2293,7 +2319,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2382,7 +2409,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2471,7 +2499,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2560,7 +2589,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2646,7 +2676,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2732,7 +2763,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2818,7 +2850,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2904,7 +2937,8 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -2990,7 +3024,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3076,7 +3111,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3162,7 +3198,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3243,7 +3280,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3328,7 +3366,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3411,7 +3450,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3497,7 +3537,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3583,7 +3624,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3669,7 +3711,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3750,7 +3793,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3836,7 +3880,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -3922,7 +3967,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4008,7 +4054,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4094,7 +4141,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4180,7 +4228,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4266,7 +4315,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 
'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4352,7 +4402,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4438,7 +4489,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4524,7 +4576,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4610,7 +4663,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4696,7 +4750,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4777,7 +4832,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4860,7 +4916,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -4949,7 +5006,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5030,7 +5088,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5119,7 +5178,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5208,7 +5268,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5297,7 +5358,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5378,7 +5440,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5459,7 +5522,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5554,7 +5618,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5640,7 +5705,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5726,7 +5792,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 
'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5812,7 +5879,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5898,7 +5966,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -5979,7 +6048,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6060,7 +6130,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6141,7 +6212,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6222,7 +6294,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6303,7 +6376,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6384,7 +6458,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6469,7 +6544,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6550,7 +6626,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -6632,6 +6709,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, 'primary_config_entry': , @@ -6714,6 +6792,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -6796,6 +6875,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -6877,6 +6957,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, 'primary_config_entry': , @@ -6959,6 +7040,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Water meter', 'name_by_user': None, 'primary_config_entry': , @@ -7044,7 +7126,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7127,7 +7210,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7213,7 +7297,8 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7299,7 +7384,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7385,7 +7471,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7466,7 +7553,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7552,7 +7640,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7638,7 +7727,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7724,7 +7814,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7810,7 +7901,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7896,7 +7988,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -7982,7 +8075,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8068,7 +8162,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8154,7 +8249,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8240,7 +8336,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8326,7 +8423,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8412,7 +8510,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8493,7 +8592,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8576,7 +8676,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ 
-8665,7 +8766,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8746,7 +8848,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8835,7 +8938,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -8924,7 +9028,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9013,7 +9118,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9094,7 +9200,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9175,7 +9282,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9270,7 +9378,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9356,7 +9465,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9442,7 +9552,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9528,7 +9639,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9614,7 +9726,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9695,7 +9808,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9776,7 +9890,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9857,7 +9972,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -9938,7 +10054,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10019,7 +10136,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 
'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10100,7 +10218,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10185,7 +10304,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10266,7 +10386,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10348,6 +10469,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Gas meter', 'name_by_user': None, 'primary_config_entry': , @@ -10430,6 +10552,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -10512,6 +10635,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Inlet heat meter', 'name_by_user': None, 'primary_config_entry': , @@ -10593,6 +10717,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Warm water meter', 'name_by_user': None, 'primary_config_entry': , @@ -10675,6 +10800,7 @@ }), 'manufacturer': 'HomeWizard', 'model': 'HWE-P1', + 'model_id': None, 'name': 'Water meter', 'name_by_user': None, 'primary_config_entry': , @@ -10760,7 +10886,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10843,7 +10970,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -10929,7 +11057,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11015,7 +11144,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11101,7 +11231,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11187,7 +11318,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11273,7 +11405,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11359,7 +11492,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11445,7 +11579,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11531,7 +11666,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 
'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11617,7 +11753,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11703,7 +11840,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11789,7 +11927,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11875,7 +12014,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -11961,7 +12101,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12047,7 +12188,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12128,7 +12270,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12217,7 +12360,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12298,7 +12442,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12387,7 +12532,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12476,7 +12622,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12565,7 +12712,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12651,7 +12799,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12737,7 +12886,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12823,7 +12973,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -12909,7 +13060,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 
'primary_config_entry': , @@ -12990,7 +13142,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13071,7 +13224,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13152,7 +13306,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13233,7 +13388,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13314,7 +13470,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13395,7 +13552,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-P1', + 'model': 'Wi-Fi P1 Meter', + 'model_id': 'HWE-P1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13480,7 +13638,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13566,7 +13725,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13652,7 +13812,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13741,7 +13902,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13830,7 +13992,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13911,7 +14074,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -13996,7 +14160,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14082,7 +14247,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14168,7 +14334,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14254,7 +14421,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14340,7 +14508,8 @@ 'labels': 
set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14426,7 +14595,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14515,7 +14685,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14601,7 +14772,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14690,7 +14862,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14776,7 +14949,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14862,7 +15036,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -14943,7 +15118,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15028,7 +15204,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15114,7 +15291,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15199,7 +15377,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15280,7 +15459,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15365,7 +15545,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15451,7 +15632,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15537,7 +15719,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15623,7 +15806,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15709,7 
+15893,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15795,7 +15980,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15884,7 +16070,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -15970,7 +16157,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16056,7 +16244,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16142,7 +16331,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16223,7 +16413,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16308,7 +16499,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16394,7 +16586,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16480,7 +16673,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16566,7 +16760,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16652,7 +16847,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16738,7 +16934,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16824,7 +17021,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16910,7 +17108,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -16996,7 +17195,8 @@ 'labels': set({ }), 'manufacturer': 
'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17082,7 +17282,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17168,7 +17369,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17254,7 +17456,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17343,7 +17546,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17429,7 +17633,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17515,7 +17720,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17601,7 +17807,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17690,7 +17897,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17779,7 +17987,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17868,7 +18077,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -17954,7 +18164,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18040,7 +18251,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18126,7 +18338,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18212,7 +18425,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18298,7 +18512,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi 
kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18384,7 +18599,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18470,7 +18686,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -18551,7 +18768,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/homewizard/snapshots/test_switch.ambr b/tests/components/homewizard/snapshots/test_switch.ambr index ba630e2f0b4..68a351c1ebb 100644 --- a/tests/components/homewizard/snapshots/test_switch.ambr +++ b/tests/components/homewizard/snapshots/test_switch.ambr @@ -70,7 +70,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH1', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'HWE-KWH1', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -151,7 +152,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-KWH3', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'HWE-KWH3', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -233,7 +235,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -314,7 +317,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -395,7 +399,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -477,7 +482,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -558,7 +564,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -639,7 +646,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-SKT', + 'model': 'Wi-Fi Energy Socket', + 'model_id': 'HWE-SKT', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -720,7 +728,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'HWE-WTR', + 'model': 'Wi-Fi Watermeter', + 'model_id': 'HWE-WTR', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -801,7 +810,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM230-wifi', + 'model': 'Wi-Fi kWh Meter 1-phase', + 'model_id': 'SDM230-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , @@ -882,7 +892,8 @@ 'labels': set({ }), 'manufacturer': 'HomeWizard', - 'model': 'SDM630-wifi', + 'model': 'Wi-Fi kWh Meter 3-phase', + 'model_id': 'SDM630-wifi', 'name': 'Device', 'name_by_user': None, 'primary_config_entry': , diff --git 
a/tests/components/homewizard/test_init.py b/tests/components/homewizard/test_init.py index 969be7a604c..33412900677 100644 --- a/tests/components/homewizard/test_init.py +++ b/tests/components/homewizard/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import MagicMock -from homewizard_energy.errors import DisabledError, HomeWizardEnergyException +from homewizard_energy.errors import DisabledError import pytest from homeassistant.components.homewizard.const import DOMAIN @@ -97,152 +97,6 @@ async def test_load_removes_reauth_flow( assert len(flows) == 0 -@pytest.mark.parametrize( - "exception", - [ - HomeWizardEnergyException, - Exception, - ], -) -async def test_load_handles_homewizardenergy_exception( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_homewizardenergy: MagicMock, - exception: Exception, -) -> None: - """Test setup handles exception from API.""" - mock_homewizardenergy.device.side_effect = exception - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - assert mock_config_entry.state in ( - ConfigEntryState.SETUP_RETRY, - ConfigEntryState.SETUP_ERROR, - ) - - -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ], -) -@pytest.mark.usefixtures("mock_homewizardenergy") -async def test_sensor_migration( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, -) -> None: - """Test total power T1 sensors are migrated.""" - mock_config_entry.add_to_hass(hass) - - entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, - ) - - assert entity.unique_id == old_unique_id - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - entity_migrated = entity_registry.async_get(entity.entity_id) - assert entity_migrated - assert entity_migrated.unique_id == new_unique_id - assert entity_migrated.previous_unique_id == old_unique_id - - -@pytest.mark.parametrize( - ("device_fixture", "old_unique_id", "new_unique_id"), - [ - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-11", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_import_t1_kwh", - "aabbccddeeff_total_power_import_kwh", - ), - ( - "HWE-SKT-21", - "aabbccddeeff_total_power_export_t1_kwh", - "aabbccddeeff_total_power_export_kwh", - ), - ], -) -@pytest.mark.usefixtures("mock_homewizardenergy") -async def test_sensor_migration_does_not_trigger( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_config_entry: MockConfigEntry, - old_unique_id: str, - new_unique_id: str, -) -> None: - """Test total power T1 sensors are not migrated when not possible.""" - mock_config_entry.add_to_hass(hass) - - old_entity: er.RegistryEntry = 
entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=old_unique_id, - config_entry=mock_config_entry, - ) - - new_entity: er.RegistryEntry = entity_registry.async_get_or_create( - domain=Platform.SENSOR, - platform=DOMAIN, - unique_id=new_unique_id, - config_entry=mock_config_entry, - ) - - assert old_entity.unique_id == old_unique_id - assert new_entity.unique_id == new_unique_id - - assert await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - entity = entity_registry.async_get(old_entity.entity_id) - assert entity - assert entity.unique_id == old_unique_id - assert entity.previous_unique_id is None - - entity = entity_registry.async_get(new_entity.entity_id) - assert entity - assert entity.unique_id == new_unique_id - assert entity.previous_unique_id is None - - @pytest.mark.parametrize( ("device_fixture", "old_unique_id", "new_unique_id"), [ diff --git a/tests/components/homeworks/conftest.py b/tests/components/homeworks/conftest.py index ca0e08e9215..9562063ab97 100644 --- a/tests/components/homeworks/conftest.py +++ b/tests/components/homeworks/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Lutron Homeworks Series 4 and 8 tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.homeworks.const import ( CONF_ADDR, @@ -17,10 +17,55 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from tests.common import MockConfigEntry +CONFIG_ENTRY_OPTIONS = { + CONF_CONTROLLER_ID: "main_controller", + CONF_HOST: "192.168.0.1", + CONF_PORT: 1234, + CONF_DIMMERS: [ + { + CONF_ADDR: "[02:08:01:01]", + CONF_NAME: "Foyer Sconces", + CONF_RATE: 1.0, + } + ], + CONF_KEYPADS: [ + { + CONF_ADDR: "[02:08:02:01]", + CONF_NAME: "Foyer Keypad", + CONF_BUTTONS: [ + { + CONF_NAME: "Morning", + CONF_NUMBER: 1, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Relax", + CONF_NUMBER: 2, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Dim up", + CONF_NUMBER: 3, + CONF_LED: False, + CONF_RELEASE_DELAY: 0.2, + }, + ], + } + ], +} + @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -28,45 +73,19 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="Lutron Homeworks", domain=DOMAIN, - data={}, - options={ - CONF_CONTROLLER_ID: "main_controller", - CONF_HOST: "192.168.0.1", - CONF_PORT: 1234, - CONF_DIMMERS: [ - { - CONF_ADDR: "[02:08:01:01]", - CONF_NAME: "Foyer Sconces", - CONF_RATE: 1.0, - } - ], - CONF_KEYPADS: [ - { - CONF_ADDR: "[02:08:02:01]", - CONF_NAME: "Foyer Keypad", - CONF_BUTTONS: [ - { - CONF_NAME: "Morning", - CONF_NUMBER: 1, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Relax", - CONF_NUMBER: 2, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Dim up", - CONF_NUMBER: 3, - CONF_LED: False, - CONF_RELEASE_DELAY: 0.2, - }, - ], - } - ], - }, + data={CONF_PASSWORD: None, CONF_USERNAME: None}, + options=CONFIG_ENTRY_OPTIONS, + ) + + +@pytest.fixture +def mock_config_entry_username_password() -> MockConfigEntry: + """Return the default mocked config entry with credentials.""" + return MockConfigEntry( + title="Lutron Homeworks", + domain=DOMAIN, + 
data={CONF_PASSWORD: "hunter2", CONF_USERNAME: "username"}, + options=CONFIG_ENTRY_OPTIONS, ) diff --git a/tests/components/homeworks/test_binary_sensor.py b/tests/components/homeworks/test_binary_sensor.py index 0b21ae3b773..4bd42cc0a59 100644 --- a/tests/components/homeworks/test_binary_sensor.py +++ b/tests/components/homeworks/test_binary_sensor.py @@ -30,7 +30,7 @@ async def test_binary_sensor_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert entity_id in hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN) diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 8f5334b21f9..d0693531006 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -2,6 +2,7 @@ from unittest.mock import ANY, MagicMock +from pyhomeworks import exceptions as hw_exceptions import pytest from pytest_unordered import unordered @@ -17,7 +18,13 @@ from homeassistant.components.homeworks.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -45,7 +52,7 @@ async def test_user_flow( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Main controller" - assert result["data"] == {} + assert result["data"] == {"password": None, "username": None} assert result["options"] == { "controller_id": "main_controller", "dimmers": [], @@ -53,9 +60,107 @@ async def test_user_flow( "keypads": [], "port": 1234, } - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) mock_controller.close.assert_called_once_with() - mock_controller.join.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": "hunter2", "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_user_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> 
None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": None, "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, "username", None) + mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_password_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "need_username_with_password"} async def test_user_flow_already_exists( @@ -96,7 +201,12 @@ async def test_user_flow_already_exists( @pytest.mark.parametrize( ("side_effect", "error"), - [(ConnectionError, "connection_error"), (Exception, "unknown_error")], + [ + (hw_exceptions.HomeworksConnectionFailed, "connection_error"), + (hw_exceptions.HomeworksInvalidCredentialsProvided, "invalid_credentials"), + (hw_exceptions.HomeworksNoCredentialsProvided, "credentials_needed"), + (Exception, "unknown_error"), + ], ) async def test_user_flow_cannot_connect( hass: HomeAssistant, @@ -266,6 +376,32 @@ async def test_reconfigure_flow_flow_no_change( } +async def test_reconfigure_flow_credentials_password_only( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.2", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {"base": "need_username_with_password"} + + async def test_options_add_light_flow( hass: HomeAssistant, mock_empty_config_entry: MockConfigEntry, @@ -432,7 +568,14 @@ async def test_options_add_remove_light_flow( ) -@pytest.mark.parametrize("keypad_address", ["[02:08:03:01]", "[02:08:03]"]) +@pytest.mark.parametrize( + "keypad_address", + [ + "[02:08:03]", + "[02:08:03:01]", + "[02:08:03:01:00]", + ], +) async def test_options_add_remove_keypad_flow( hass: HomeAssistant, mock_config_entry: 
MockConfigEntry, diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index 87aabb6258f..2a4bd28138e 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -2,12 +2,18 @@ from unittest.mock import ANY, MagicMock -from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED +from pyhomeworks import exceptions as hw_exceptions +from pyhomeworks.pyhomeworks import ( + HW_BUTTON_PRESSED, + HW_BUTTON_RELEASED, + HW_LOGIN_INCORRECT, +) import pytest from homeassistant.components.homeworks import EVENT_BUTTON_PRESS, EVENT_BUTTON_RELEASE from homeassistant.components.homeworks.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -25,7 +31,7 @@ async def test_load_unload_config_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -34,13 +40,60 @@ async def test_load_unload_config_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED +async def test_load_config_entry_with_credentials( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test the Homeworks configuration entry loading/unloading.""" + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + + await hass.config_entries.async_unload(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry_username_password.state is ConfigEntryState.NOT_LOADED + + +async def test_controller_credentials_changed( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test controller credentials changed. + + Note: This just ensures we don't blow up when credentials changed, in the future a + reauth flow should be added. 
+ """ + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + hw_callback = mock_homeworks.mock_calls[0][1][2] + + hw_callback(HW_LOGIN_INCORRECT, []) + + async def test_config_entry_not_ready( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock, ) -> None: """Test the Homeworks configuration entry not ready.""" - mock_homeworks.side_effect = ConnectionError + mock_homeworks.return_value.connect.side_effect = ( + hw_exceptions.HomeworksConnectionFailed + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -62,7 +115,7 @@ async def test_keypad_events( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] hw_callback(HW_BUTTON_PRESSED, ["[02:08:02:01]", 1]) @@ -165,3 +218,25 @@ async def test_send_command( blocking=True, ) assert len(mock_controller._send.mock_calls) == 0 + + +async def test_cleanup_on_ha_shutdown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test cleanup when HA shuts down.""" + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) + mock_controller.stop.assert_not_called() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + + mock_controller.stop.assert_called_once_with() diff --git a/tests/components/homeworks/test_light.py b/tests/components/homeworks/test_light.py index a5d94f736d5..1cd2951128c 100644 --- a/tests/components/homeworks/test_light.py +++ b/tests/components/homeworks/test_light.py @@ -35,7 +35,7 @@ async def test_light_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert len(mock_controller.request_dimmer_level.mock_calls) == 1 @@ -106,7 +106,7 @@ async def test_light_restore_brightness( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert hass.states.async_entity_ids("light") == unordered([entity_id]) diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index 5c5b6c0a44a..e48664db9ae 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -86,6 +86,7 @@ def device(): mock_device.system_mode = "off" mock_device.name = "device1" mock_device.current_temperature = CURRENTTEMPERATURE + 
mock_device.temperature_unit = "C" mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index d1faf9af9a0..25bb73851c6 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ b/tests/components/honeywell/snapshots/test_climate.ambr @@ -3,7 +3,7 @@ ReadOnlyDict({ 'aux_heat': 'off', 'current_humidity': 50, - 'current_temperature': -6.7, + 'current_temperature': 20, 'fan_action': 'idle', 'fan_mode': 'auto', 'fan_modes': list([ @@ -20,9 +20,9 @@ , ]), 'max_humidity': 99, - 'max_temp': 1.7, + 'max_temp': 35, 'min_humidity': 30, - 'min_temp': -13.9, + 'min_temp': 7, 'permanent_hold': False, 'preset_mode': 'none', 'preset_modes': list([ diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index b57be5f1838..55a55f7d7e7 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -92,14 +92,13 @@ async def test_dynamic_attributes( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: """Test dynamic attributes.""" - await init_integration(hass, config_entry) entity_id = f"climate.{device.name}" state = hass.states.get(entity_id) assert state.state == HVACMode.OFF attributes = state.attributes - assert attributes["current_temperature"] == -6.7 + assert attributes["current_temperature"] == 20 assert attributes["current_humidity"] == 50 device.system_mode = "cool" @@ -114,7 +113,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL attributes = state.attributes - assert attributes["current_temperature"] == -6.1 + assert attributes["current_temperature"] == 21 assert attributes["current_humidity"] == 55 device.system_mode = "heat" @@ -129,7 +128,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT attributes = state.attributes - assert attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 device.system_mode = "auto" @@ -142,7 +141,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL attributes = state.attributes - assert attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 @@ -348,7 +347,7 @@ async def test_service_calls_off_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) @@ -362,8 +361,8 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -375,13 +374,13 @@ async def test_service_calls_off_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 24.0, + ATTR_TARGET_TEMP_HIGH: 34.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) 
+ device.set_setpoint_cool.assert_called_with(34) + device.set_setpoint_heat.assert_called_with(24) assert "Invalid temperature" in caplog.text device.set_setpoint_heat.reset_mock() @@ -399,14 +398,14 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) reset_mock(device) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) device.set_setpoint_heat.assert_not_called() @@ -517,7 +516,7 @@ async def test_service_calls_cool_mode( {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 15) device.set_hold_cool.reset_mock() await hass.services.async_call( @@ -525,13 +524,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) caplog.clear() device.set_setpoint_cool.reset_mock() @@ -543,13 +542,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -733,10 +732,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() device.set_hold_heat.side_effect = aiosomecomfort.SomeComfortError @@ -744,10 +743,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() assert "Invalid temperature" in caplog.text @@ -756,10 +755,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() caplog.clear() @@ -773,8 +772,8 @@ 
async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -789,8 +788,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -984,8 +983,8 @@ async def test_service_calls_auto_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_once_with(95) - device.set_setpoint_heat.assert_called_once_with(77) + device.set_setpoint_cool.assert_called_once_with(35) + device.set_setpoint_heat.assert_called_once_with(25) reset_mock(device) caplog.clear() diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 20dfe0a3710..7f29f8a4b9f 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -63,7 +63,7 @@ PRIVATE_ADDRESSES = [ ] -async def mock_handler(request): +async def mock_handler(request: web.Request) -> web.Response: """Return if request was authenticated.""" if not request[KEY_AUTHENTICATED]: raise HTTPUnauthorized @@ -75,7 +75,7 @@ async def mock_handler(request): @pytest.fixture -def app(hass): +def app(hass: HomeAssistant) -> web.Application: """Fixture to set up a web.Application.""" app = web.Application() app[KEY_HASS] = hass @@ -85,7 +85,7 @@ def app(hass): @pytest.fixture -def app2(hass): +def app2(hass: HomeAssistant) -> web.Application: """Fixture to set up a web.Application without real_ip middleware.""" app = web.Application() app[KEY_HASS] = hass @@ -94,7 +94,9 @@ def app2(hass): @pytest.fixture -def trusted_networks_auth(hass): +def trusted_networks_auth( + hass: HomeAssistant, +) -> trusted_networks.TrustedNetworksAuthProvider: """Load trusted networks auth provider.""" prv = trusted_networks.TrustedNetworksAuthProvider( hass, @@ -114,7 +116,7 @@ async def test_auth_middleware_loaded_by_default(hass: HomeAssistant) -> None: async def test_cant_access_with_password_in_header( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -131,7 +133,7 @@ async def test_cant_access_with_password_in_header( async def test_cant_access_with_password_in_query( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -151,7 +153,7 @@ async def test_cant_access_with_password_in_query( async def test_basic_auth_does_not_work( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass: HomeAssistant, local_auth: HassAuthProvider, @@ -175,8 +177,8 @@ async def test_basic_auth_does_not_work( async def test_cannot_access_with_trusted_ip( hass: HomeAssistant, - app2, - trusted_networks_auth, + app2: web.Application, + trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -203,7 +205,7 @@ async def test_cannot_access_with_trusted_ip( async def test_auth_active_access_with_access_token_in_header( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, 
hass_access_token: str, ) -> None: @@ -239,8 +241,8 @@ async def test_auth_active_access_with_access_token_in_header( async def test_auth_active_access_with_trusted_ip( hass: HomeAssistant, - app2, - trusted_networks_auth, + app2: web.Application, + trusted_networks_auth: trusted_networks.TrustedNetworksAuthProvider, aiohttp_client: ClientSessionGenerator, hass_owner_user: MockUser, ) -> None: @@ -266,7 +268,7 @@ async def test_auth_active_access_with_trusted_ip( async def test_auth_legacy_support_api_password_cannot_access( - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, local_auth: HassAuthProvider, hass: HomeAssistant, @@ -287,7 +289,7 @@ async def test_auth_legacy_support_api_password_cannot_access( async def test_auth_access_signed_path_with_refresh_token( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -332,7 +334,7 @@ async def test_auth_access_signed_path_with_refresh_token( async def test_auth_access_signed_path_with_query_param( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -362,7 +364,7 @@ async def test_auth_access_signed_path_with_query_param( async def test_auth_access_signed_path_with_query_param_order( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -403,7 +405,7 @@ async def test_auth_access_signed_path_with_query_param_order( async def test_auth_access_signed_path_with_query_param_safe_param( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -440,7 +442,7 @@ async def test_auth_access_signed_path_with_query_param_safe_param( ) async def test_auth_access_signed_path_with_query_param_tamper( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, base_url: str, @@ -466,7 +468,7 @@ async def test_auth_access_signed_path_with_query_param_tamper( async def test_auth_access_signed_path_via_websocket( hass: HomeAssistant, - app, + app: web.Application, hass_ws_client: WebSocketGenerator, hass_read_only_access_token: str, ) -> None: @@ -504,7 +506,7 @@ async def test_auth_access_signed_path_via_websocket( async def test_auth_access_signed_path_with_http( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -539,7 +541,7 @@ async def test_auth_access_signed_path_with_http( async def test_auth_access_signed_path_with_content_user( - hass: HomeAssistant, app, aiohttp_client: ClientSessionGenerator + hass: HomeAssistant, app: web.Application, aiohttp_client: ClientSessionGenerator ) -> None: """Test access signed url uses content user.""" await async_setup_auth(hass, app) @@ -556,7 +558,7 @@ async def test_auth_access_signed_path_with_content_user( async def test_local_only_user_rejected( hass: HomeAssistant, - app, + app: web.Application, aiohttp_client: ClientSessionGenerator, hass_access_token: str, ) -> None: @@ -579,7 +581,9 @@ async def test_local_only_user_rejected( assert req.status == HTTPStatus.UNAUTHORIZED -async def test_async_user_not_allowed_do_auth(hass: HomeAssistant, app) -> None: +async def test_async_user_not_allowed_do_auth( + hass: HomeAssistant, app: web.Application +) -> None: """Test for not allowing auth.""" user = await hass.auth.async_create_user("Hello") user.is_active = False 
diff --git a/tests/components/http/test_static.py b/tests/components/http/test_static.py index 3e3f21d5002..2ac7c6ded93 100644 --- a/tests/components/http/test_static.py +++ b/tests/components/http/test_static.py @@ -4,12 +4,12 @@ from http import HTTPStatus from pathlib import Path from aiohttp.test_utils import TestClient -from aiohttp.web_exceptions import HTTPForbidden import pytest from homeassistant.components.http import StaticPathConfig -from homeassistant.components.http.static import CachingStaticResource, _get_file_path -from homeassistant.core import EVENT_HOMEASSISTANT_START, HomeAssistant +from homeassistant.components.http.static import CachingStaticResource +from homeassistant.const import EVENT_HOMEASSISTANT_START +from homeassistant.core import HomeAssistant from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS from homeassistant.setup import async_setup_component @@ -30,37 +30,19 @@ async def mock_http_client(hass: HomeAssistant, aiohttp_client: ClientSessionGen return await aiohttp_client(hass.http.app, server_kwargs={"skip_url_asserts": True}) -@pytest.mark.parametrize( - ("url", "canonical_url"), - [ - ("//a", "//a"), - ("///a", "///a"), - ("/c:\\a\\b", "/c:%5Ca%5Cb"), - ], -) -async def test_static_path_blocks_anchors( - hass: HomeAssistant, - mock_http_client: TestClient, - tmp_path: Path, - url: str, - canonical_url: str, +async def test_static_resource_show_index( + hass: HomeAssistant, mock_http_client: TestClient, tmp_path: Path ) -> None: - """Test static paths block anchors.""" + """Test static resource will return a directory index.""" app = hass.http.app - resource = CachingStaticResource(url, str(tmp_path)) - assert resource.canonical == canonical_url + resource = CachingStaticResource("/", tmp_path, show_index=True) app.router.register_resource(resource) app[KEY_ALLOW_CONFIGURED_CORS](resource) - resp = await mock_http_client.get(canonical_url, allow_redirects=False) - assert resp.status == 403 - - # Tested directly since aiohttp will block it before - # it gets here but we want to make sure if aiohttp ever - # changes we still block it. 
- with pytest.raises(HTTPForbidden): - _get_file_path(canonical_url, tmp_path) + resp = await mock_http_client.get("/") + assert resp.status == 200 + assert resp.content_type == "text/html" async def test_async_register_static_paths( diff --git a/tests/components/hue/conftest.py b/tests/components/hue/conftest.py index fca950d6b7a..7fc6c5ae33f 100644 --- a/tests/components/hue/conftest.py +++ b/tests/components/hue/conftest.py @@ -2,7 +2,7 @@ import asyncio from collections import deque -import json +from collections.abc import Generator import logging from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -16,27 +16,24 @@ from homeassistant.components import hue from homeassistant.components.hue.v1 import sensor_base as hue_sensor_base from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType from .const import FAKE_BRIDGE, FAKE_BRIDGE_DEVICE -from tests.common import ( - MockConfigEntry, - async_mock_service, - load_fixture, - mock_device_registry, -) +from tests.common import MockConfigEntry, load_json_array_fixture @pytest.fixture(autouse=True) -def no_request_delay(): +def no_request_delay() -> Generator[None]: """Make the request refresh delay 0 for instant tests.""" with patch("homeassistant.components.hue.const.REQUEST_REFRESH_DELAY", 0): yield -def create_mock_bridge(hass, api_version=1): +def create_mock_bridge(hass: HomeAssistant, api_version: int = 1) -> Mock: """Create a mocked HueBridge instance.""" bridge = Mock( hass=hass, @@ -50,10 +47,10 @@ def create_mock_bridge(hass, api_version=1): bridge.logger = logging.getLogger(__name__) if bridge.api_version == 2: - bridge.api = create_mock_api_v2(hass) + bridge.api = create_mock_api_v2() bridge.mock_requests = bridge.api.mock_requests else: - bridge.api = create_mock_api_v1(hass) + bridge.api = create_mock_api_v1() bridge.sensor_manager = hue_sensor_base.SensorManager(bridge) bridge.mock_requests = bridge.api.mock_requests bridge.mock_light_responses = bridge.api.mock_light_responses @@ -85,18 +82,18 @@ def create_mock_bridge(hass, api_version=1): @pytest.fixture -def mock_api_v1(hass): +def mock_api_v1() -> Mock: """Mock the Hue V1 api.""" - return create_mock_api_v1(hass) + return create_mock_api_v1() @pytest.fixture -def mock_api_v2(hass): +def mock_api_v2() -> Mock: """Mock the Hue V2 api.""" - return create_mock_api_v2(hass) + return create_mock_api_v2() -def create_mock_api_v1(hass): +def create_mock_api_v1() -> Mock: """Create a mock V1 API.""" api = Mock(spec=aiohue_v1.HueBridgeV1) api.initialize = AsyncMock() @@ -140,12 +137,12 @@ def create_mock_api_v1(hass): @pytest.fixture(scope="package") -def v2_resources_test_data(): +def v2_resources_test_data() -> JsonArrayType: """Load V2 resources mock data.""" - return json.loads(load_fixture("hue/v2_resources.json")) + return load_json_array_fixture("hue/v2_resources.json") -def create_mock_api_v2(hass): +def create_mock_api_v2() -> Mock: """Create a mock V2 API.""" api = Mock(spec=aiohue_v2.HueBridgeV2) api.initialize = AsyncMock() @@ -198,30 +195,32 @@ def create_mock_api_v2(hass): @pytest.fixture -def mock_bridge_v1(hass): +def mock_bridge_v1(hass: HomeAssistant) -> Mock: """Mock a Hue bridge with V1 api.""" return 
create_mock_bridge(hass, api_version=1) @pytest.fixture -def mock_bridge_v2(hass): +def mock_bridge_v2(hass: HomeAssistant) -> Mock: """Mock a Hue bridge with V2 api.""" return create_mock_bridge(hass, api_version=2) @pytest.fixture -def mock_config_entry_v1(hass): +def mock_config_entry_v1() -> MockConfigEntry: """Mock a config entry for a Hue V1 bridge.""" return create_config_entry(api_version=1) @pytest.fixture -def mock_config_entry_v2(hass): +def mock_config_entry_v2() -> MockConfigEntry: """Mock a config entry.""" return create_config_entry(api_version=2) -def create_config_entry(api_version=1, host="mock-host"): +def create_config_entry( + api_version: int = 1, host: str = "mock-host" +) -> MockConfigEntry: """Mock a config entry for a Hue bridge.""" return MockConfigEntry( domain=hue.DOMAIN, @@ -230,7 +229,7 @@ def create_config_entry(api_version=1, host="mock-host"): ) -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Mock setup Hue component.""" with patch.object(hue, "async_setup_entry", return_value=True): assert ( @@ -243,7 +242,9 @@ async def setup_component(hass): ) -async def setup_bridge(hass, mock_bridge, config_entry): +async def setup_bridge( + hass: HomeAssistant, mock_bridge: Mock, config_entry: MockConfigEntry +) -> None: """Load the Hue integration with the provided bridge.""" mock_bridge.config_entry = config_entry with patch.object( @@ -255,11 +256,11 @@ async def setup_bridge(hass, mock_bridge, config_entry): async def setup_platform( - hass, - mock_bridge, - platforms, - hostname=None, -): + hass: HomeAssistant, + mock_bridge: Mock, + platforms: list[Platform] | tuple[Platform] | Platform, + hostname: str | None = None, +) -> None: """Load the Hue integration with the provided bridge for given platform(s).""" if not isinstance(platforms, (list, tuple)): platforms = [platforms] @@ -282,15 +283,3 @@ async def setup_platform( # and make sure it completes before going further await hass.async_block_till_done() - - -@pytest.fixture(name="device_reg") -def get_device_reg(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="calls") -def track_calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/hue/test_binary_sensor.py b/tests/components/hue/test_binary_sensor.py index 8f299a4b6a6..3721637a674 100644 --- a/tests/components/hue/test_binary_sensor.py +++ b/tests/components/hue/test_binary_sensor.py @@ -1,13 +1,16 @@ """Philips Hue binary_sensor platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_binary_sensors( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if all v2 binary_sensors get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -78,7 +81,9 @@ async def test_binary_sensors( assert sensor.attributes["device_class"] == "motion" -async def test_binary_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_binary_sensor_add_update( + hass: HomeAssistant, mock_bridge_v2: Mock +) -> None: """Test if binary_sensor get 
added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "binary_sensor") diff --git a/tests/components/hue/test_bridge.py b/tests/components/hue/test_bridge.py index 42631215035..be7a6738617 100644 --- a/tests/components/hue/test_bridge.py +++ b/tests/components/hue/test_bridge.py @@ -1,7 +1,7 @@ """Test Hue bridge.""" import asyncio -from unittest.mock import patch +from unittest.mock import Mock, patch from aiohttp import client_exceptions from aiohue.errors import Unauthorized @@ -21,7 +21,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from tests.common import MockConfigEntry -async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1) -> None: +async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test a successful setup for V1 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -45,7 +45,7 @@ async def test_bridge_setup_v1(hass: HomeAssistant, mock_api_v1) -> None: assert forward_entries == {"light", "binary_sensor", "sensor"} -async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2) -> None: +async def test_bridge_setup_v2(hass: HomeAssistant, mock_api_v2: Mock) -> None: """Test a successful setup for V2 bridge.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -113,7 +113,9 @@ async def test_bridge_setup_timeout(hass: HomeAssistant) -> None: await hue_bridge.async_initialize_bridge() -async def test_reset_unloads_entry_if_setup(hass: HomeAssistant, mock_api_v1) -> None: +async def test_reset_unloads_entry_if_setup( + hass: HomeAssistant, mock_api_v1: Mock +) -> None: """Test calling reset while the entry has been setup.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -143,7 +145,7 @@ async def test_reset_unloads_entry_if_setup(hass: HomeAssistant, mock_api_v1) -> assert len(hass.services.async_services()) == 0 -async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1) -> None: +async def test_handle_unauthorized(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test handling an unauthorized error on update.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/hue/test_device_trigger_v1.py b/tests/components/hue/test_device_trigger_v1.py index 3d8fa64baf4..37af8c6a880 100644 --- a/tests/components/hue/test_device_trigger_v1.py +++ b/tests/components/hue/test_device_trigger_v1.py @@ -1,5 +1,7 @@ """The tests for Philips Hue device triggers for V1 bridge.""" +from unittest.mock import Mock + from pytest_unordered import unordered from homeassistant.components import automation, hue @@ -20,8 +22,8 @@ REMOTES_RESPONSE = {"7": HUE_TAP_REMOTE_1, "8": HUE_DIMMER_REMOTE_1} async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v1, - device_reg: dr.DeviceRegistry, + mock_bridge_v1: Mock, + device_registry: dr.DeviceRegistry, ) -> None: """Test we get the expected triggers from a hue remote.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -32,7 +34,7 @@ async def test_get_triggers( assert len(hass.states.async_all()) == 1 # Get triggers for specific tap switch - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) triggers = await async_get_device_automations( @@ -53,7 +55,7 @@ async def test_get_triggers( assert triggers == unordered(expected_triggers) # Get triggers for specific dimmer switch - hue_dimmer_device = 
device_reg.async_get_device( + hue_dimmer_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) hue_bat_sensor = entity_registry.async_get( @@ -90,9 +92,9 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, - mock_bridge_v1, - device_reg: dr.DeviceRegistry, - calls: list[ServiceCall], + mock_bridge_v1: Mock, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for button press trigger firing.""" mock_bridge_v1.mock_sensor_responses.append(REMOTES_RESPONSE) @@ -101,7 +103,7 @@ async def test_if_fires_on_state_change( assert len(hass.states.async_all()) == 1 # Set an automation with a specific tap switch trigger - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) assert await async_setup_component( @@ -158,8 +160,8 @@ async def test_if_fires_on_state_change( assert len(mock_bridge_v1.mock_requests) == 2 - assert len(calls) == 1 - assert calls[0].data["some"] == "B4 - 18" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "B4 - 18" # Fake another button press. new_sensor_response["7"] = dict(new_sensor_response["7"]) @@ -173,4 +175,4 @@ async def test_if_fires_on_state_change( await mock_bridge_v1.sensor_manager.coordinator.async_refresh() await hass.async_block_till_done() assert len(mock_bridge_v1.mock_requests) == 3 - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/hue/test_device_trigger_v2.py b/tests/components/hue/test_device_trigger_v2.py index efdc33375a6..1115e63fd92 100644 --- a/tests/components/hue/test_device_trigger_v2.py +++ b/tests/components/hue/test_device_trigger_v2.py @@ -1,5 +1,7 @@ """The tests for Philips Hue device triggers for V2 bridge.""" +from unittest.mock import Mock + from aiohue.v2.models.button import ButtonEvent from pytest_unordered import unordered @@ -8,7 +10,8 @@ from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.hue.v2.device import async_setup_devices from homeassistant.components.hue.v2.hue_event import async_setup_hue_events from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform @@ -16,7 +19,7 @@ from tests.common import async_capture_events, async_get_device_automations async def test_hue_event( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test hue button events.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -53,16 +56,16 @@ async def test_hue_event( async def test_get_triggers( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, - device_reg, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, + device_registry: dr.DeviceRegistry, ) -> None: """Test we get the expected triggers from a hue remote.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) await setup_platform(hass, mock_bridge_v2, ["binary_sensor", "sensor"]) # Get triggers for `Wall switch with 2 controls` - hue_wall_switch_device = device_reg.async_get_device( + hue_wall_switch_device = device_registry.async_get_device( 
identifiers={(hue.DOMAIN, "3ff06175-29e8-44a8-8fe7-af591b0025da")} ) hue_bat_sensor = entity_registry.async_get( diff --git a/tests/components/hue/test_diagnostics.py b/tests/components/hue/test_diagnostics.py index 7e64ba1ad93..49681601ebf 100644 --- a/tests/components/hue/test_diagnostics.py +++ b/tests/components/hue/test_diagnostics.py @@ -1,5 +1,7 @@ """Test Hue diagnostics.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant from .conftest import setup_platform @@ -9,7 +11,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics_v1( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1 + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v1: Mock ) -> None: """Test diagnostics v1.""" await setup_platform(hass, mock_bridge_v1, []) @@ -19,7 +21,7 @@ async def test_diagnostics_v1( async def test_diagnostics_v2( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v2 + hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge_v2: Mock ) -> None: """Test diagnostics v2.""" mock_bridge_v2.api.get_diagnostics.return_value = {"hello": "world"} diff --git a/tests/components/hue/test_event.py b/tests/components/hue/test_event.py index aedf11a6e82..33b4d16f8be 100644 --- a/tests/components/hue/test_event.py +++ b/tests/components/hue/test_event.py @@ -1,14 +1,17 @@ """Philips Hue Event platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components.event import ATTR_EVENT_TYPE, ATTR_EVENT_TYPES from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_ROTARY, FAKE_ZIGBEE_CONNECTIVITY async def test_event( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test event entity for Hue integration.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -63,7 +66,7 @@ async def test_event( assert state.attributes[ATTR_EVENT_TYPE] == "long_release" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test Event entity for newly added Relative Rotary resource.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "event") diff --git a/tests/components/hue/test_light_v1.py b/tests/components/hue/test_light_v1.py index 21b35e6d5e8..c742124e4f0 100644 --- a/tests/components/hue/test_light_v1.py +++ b/tests/components/hue/test_light_v1.py @@ -175,7 +175,7 @@ LIGHT_GAMUT = color.GamutType( LIGHT_GAMUT_TYPE = "A" -async def setup_bridge(hass: HomeAssistant, mock_bridge_v1): +async def setup_bridge(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Load the Hue light platform with the provided bridge.""" hass.config.components.add(hue.DOMAIN) config_entry = create_config_entry() @@ -192,7 +192,7 @@ async def setup_bridge(hass: HomeAssistant, mock_bridge_v1): async def test_not_load_groups_if_old_bridge( - hass: HomeAssistant, mock_bridge_v1 + hass: HomeAssistant, mock_bridge_v1: Mock ) -> None: """Test that we don't try to load groups if bridge runs old software.""" mock_bridge_v1.api.config.apiversion = "1.12.0" @@ -203,7 +203,7 @@ async def test_not_load_groups_if_old_bridge( assert len(hass.states.async_all()) == 0 -async def 
test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_lights function when no lights are found.""" mock_bridge_v1.mock_light_responses.append({}) mock_bridge_v1.mock_group_responses.append({}) @@ -212,7 +212,7 @@ async def test_no_lights_or_groups(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_lights(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_lights(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -232,7 +232,7 @@ async def test_lights(hass: HomeAssistant, mock_bridge_v1) -> None: assert lamp_2.state == "off" -async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test that lights only report appropriate color mode.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) mock_bridge_v1.mock_group_responses.append(GROUP_RESPONSE) @@ -278,7 +278,7 @@ async def test_lights_color_mode(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_groups( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1: Mock ) -> None: """Test the update_lights function with some lights.""" mock_bridge_v1.mock_light_responses.append({}) @@ -303,7 +303,7 @@ async def test_groups( assert entity_registry.async_get("light.group_2").unique_id == "2" -async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -350,7 +350,7 @@ async def test_new_group_discovered(hass: HomeAssistant, mock_bridge_v1) -> None assert new_group.attributes["color_temp"] == 250 -async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -396,7 +396,7 @@ async def test_new_light_discovered(hass: HomeAssistant, mock_bridge_v1) -> None assert light.state == "off" -async def test_group_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_group_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed group.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -427,7 +427,7 @@ async def test_group_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_group is None -async def test_light_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -456,7 +456,7 @@ async def test_light_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_light is None -async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test changing one group that will impact the state of other 
light.""" mock_bridge_v1.allow_groups = True mock_bridge_v1.mock_light_responses.append({}) @@ -509,7 +509,7 @@ async def test_other_group_update(hass: HomeAssistant, mock_bridge_v1) -> None: assert group_2.state == "off" -async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test changing one light that will impact state of other light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -562,7 +562,7 @@ async def test_other_light_update(hass: HomeAssistant, mock_bridge_v1) -> None: assert lamp_2.attributes["brightness"] == 100 -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=TimeoutError) mock_bridge_v1.api.groups.update = Mock(side_effect=TimeoutError) @@ -571,7 +571,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.lights.update = Mock(side_effect=aiohue.Unauthorized) await setup_bridge(hass, mock_bridge_v1) @@ -580,7 +580,7 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(mock_bridge_v1.handle_unauthorized_error.mock_calls) == 1 -async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -633,7 +633,9 @@ async def test_light_turn_on_service(hass: HomeAssistant, mock_bridge_v1) -> Non } -async def test_light_turn_off_service(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_light_turn_off_service( + hass: HomeAssistant, mock_bridge_v1: Mock +) -> None: """Test calling the turn on service on a light.""" mock_bridge_v1.mock_light_responses.append(LIGHT_RESPONSE) @@ -775,7 +777,7 @@ async def test_group_features( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v1, + mock_bridge_v1: Mock, ) -> None: """Test group features.""" color_temp_type = "Color temperature light" diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index fca907eabb0..417670a3769 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -1,15 +1,18 @@ """Philips Hue lights platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components.light import ColorMode from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_DEVICE, FAKE_LIGHT, FAKE_ZIGBEE_CONNECTIVITY async def test_lights( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if all v2 lights get created with correct features.""" await 
mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -77,7 +80,7 @@ async def test_lights( async def test_light_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn on service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -229,7 +232,7 @@ async def test_light_turn_on_service( async def test_light_turn_off_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn off service on a light.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -318,7 +321,7 @@ async def test_light_turn_off_service( assert mock_bridge_v2.mock_requests[4]["json"]["identify"]["action"] == "identify" -async def test_light_added(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_light_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test new light added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) @@ -341,7 +344,7 @@ async def test_light_added(hass: HomeAssistant, mock_bridge_v2) -> None: async def test_light_availability( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test light availability property.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -375,8 +378,8 @@ async def test_light_availability( async def test_grouped_lights( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if all v2 grouped lights get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) diff --git a/tests/components/hue/test_migration.py b/tests/components/hue/test_migration.py index adcc582a314..388e2f68f99 100644 --- a/tests/components/hue/test_migration.py +++ b/tests/components/hue/test_migration.py @@ -1,10 +1,11 @@ """Test Hue migration logic.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.json import JsonArrayType from tests.common import MockConfigEntry @@ -51,9 +52,9 @@ async def test_light_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -98,9 +99,9 @@ async def test_sensor_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for sensors migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -159,9 +160,9 @@ async def 
test_sensor_entity_migration( async def test_group_entity_migration_with_v1_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 @@ -194,9 +195,9 @@ async def test_group_entity_migration_with_v1_id( async def test_group_entity_migration_with_v2_group_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - mock_config_entry_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + mock_config_entry_v2: MockConfigEntry, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if entity schema for grouped_lights migrates from v1 to v2.""" config_entry = mock_bridge_v2.config_entry = mock_config_entry_v2 diff --git a/tests/components/hue/test_scene.py b/tests/components/hue/test_scene.py index 5e2fd939087..9488e0e14ce 100644 --- a/tests/components/hue/test_scene.py +++ b/tests/components/hue/test_scene.py @@ -1,8 +1,11 @@ """Philips Hue scene platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.const import STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_SCENE @@ -11,8 +14,8 @@ from .const import FAKE_SCENE async def test_scene( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if (config) scenes get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -32,7 +35,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "zone" assert test_entity.attributes["name"] == "Dynamic Test Scene" assert test_entity.attributes["speed"] == 0.6269841194152832 - assert test_entity.attributes["brightness"] == 46.85 + assert test_entity.attributes["brightness"] == 119 assert test_entity.attributes["is_dynamic"] is True # test (regular) scene for a hue room @@ -44,7 +47,7 @@ async def test_scene( assert test_entity.attributes["group_type"] == "room" assert test_entity.attributes["name"] == "Regular Test Scene" assert test_entity.attributes["speed"] == 0.5 - assert test_entity.attributes["brightness"] == 100.0 + assert test_entity.attributes["brightness"] == 255 assert test_entity.attributes["is_dynamic"] is False # test smart scene @@ -72,7 +75,7 @@ async def test_scene( async def test_scene_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -109,7 +112,7 @@ async def test_scene_turn_on_service( async def test_scene_advanced_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the advanced turn on service on a scene.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -146,7 +149,7 @@ async def test_scene_advanced_turn_on_service( async def test_scene_updates( - hass: HomeAssistant, 
mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test scene events from bridge.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -167,7 +170,7 @@ async def test_scene_updates( assert test_entity is not None assert test_entity.state == STATE_UNKNOWN assert test_entity.name == "Test Room Mocked Scene" - assert test_entity.attributes["brightness"] == 65.0 + assert test_entity.attributes["brightness"] == 166 # test update updated_resource = {**FAKE_SCENE} @@ -176,7 +179,7 @@ async def test_scene_updates( await hass.async_block_till_done() test_entity = hass.states.get(test_entity_id) assert test_entity is not None - assert test_entity.attributes["brightness"] == 35.0 + assert test_entity.attributes["brightness"] == 89 # # test entity name changes on group name change mock_bridge_v2.api.emit_event( diff --git a/tests/components/hue/test_sensor_v1.py b/tests/components/hue/test_sensor_v1.py index b1ef94f8ed0..0c5d7cccfe2 100644 --- a/tests/components/hue/test_sensor_v1.py +++ b/tests/components/hue/test_sensor_v1.py @@ -10,7 +10,7 @@ from homeassistant.components.hue.const import ATTR_HUE_EVENT from homeassistant.components.hue.v1 import sensor_base from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from .conftest import create_mock_bridge, setup_platform @@ -282,7 +282,7 @@ SENSOR_RESPONSE = { } -async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test the update_items function when no sensors are found.""" mock_bridge_v1.mock_sensor_responses.append({}) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -291,7 +291,7 @@ async def test_no_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_sensors_with_multiple_bridges( - hass: HomeAssistant, mock_bridge_v1 + hass: HomeAssistant, mock_bridge_v1: Mock ) -> None: """Test the update_items function with some sensors.""" mock_bridge_2 = create_mock_bridge(hass, api_version=1) @@ -315,7 +315,7 @@ async def test_sensors_with_multiple_bridges( async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1 + hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_bridge_v1: Mock ) -> None: """Test the update_items function with some sensors.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -361,7 +361,7 @@ async def test_sensors( ) -async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test that unsupported sensors don't get added and don't fail.""" response_with_unsupported = dict(SENSOR_RESPONSE) response_with_unsupported["7"] = UNSUPPORTED_SENSOR @@ -372,7 +372,7 @@ async def test_unsupported_sensors(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 7 -async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has a new sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -406,7 +406,7 @@ async def test_new_sensor_discovered(hass: HomeAssistant, mock_bridge_v1) -> 
Non assert temperature.state == "17.75" -async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test if 2nd update has removed sensor.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -434,7 +434,7 @@ async def test_sensor_removed(hass: HomeAssistant, mock_bridge_v1) -> None: assert removed_sensor is None -async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not available if timeout error during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=TimeoutError) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -442,7 +442,7 @@ async def test_update_timeout(hass: HomeAssistant, mock_bridge_v1) -> None: assert len(hass.states.async_all()) == 0 -async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: +async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1: Mock) -> None: """Test bridge marked as not authorized if unauthorized during update.""" mock_bridge_v1.api.sensors.update = Mock(side_effect=aiohue.Unauthorized) await setup_platform(hass, mock_bridge_v1, ["binary_sensor", "sensor"]) @@ -452,7 +452,10 @@ async def test_update_unauthorized(hass: HomeAssistant, mock_bridge_v1) -> None: async def test_hue_events( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_bridge_v1, device_reg + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_bridge_v1: Mock, + device_registry: dr.DeviceRegistry, ) -> None: """Test that hue remotes fire events when pressed.""" mock_bridge_v1.mock_sensor_responses.append(SENSOR_RESPONSE) @@ -464,7 +467,7 @@ async def test_hue_events( assert len(hass.states.async_all()) == 7 assert len(events) == 0 - hue_tap_device = device_reg.async_get_device( + hue_tap_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:00:00:00:00:44:23:08")} ) @@ -495,7 +498,7 @@ async def test_hue_events( "last_updated": "2019-12-28T22:58:03", } - hue_dimmer_device = device_reg.async_get_device( + hue_dimmer_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "00:17:88:01:10:3e:3a:dc")} ) @@ -594,7 +597,7 @@ async def test_hue_events( async_fire_time_changed(hass) await hass.async_block_till_done() - hue_aurora_device = device_reg.async_get_device( + hue_aurora_device = device_registry.async_get_device( identifiers={(hue.DOMAIN, "ff:ff:00:0f:e7:fd:bc:b7")} ) diff --git a/tests/components/hue/test_sensor_v2.py b/tests/components/hue/test_sensor_v2.py index beb86de505b..22888a411ba 100644 --- a/tests/components/hue/test_sensor_v2.py +++ b/tests/components/hue/test_sensor_v2.py @@ -1,19 +1,24 @@ """Philips Hue sensor platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.components import hue from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType from .conftest import setup_bridge, setup_platform from .const import FAKE_DEVICE, FAKE_SENSOR, FAKE_ZIGBEE_CONNECTIVITY +from tests.common import MockConfigEntry + async def test_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, ) -> None: """Test if all v2 sensors 
get created with correct features.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -65,9 +70,9 @@ async def test_sensors( async def test_enable_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_bridge_v2, - v2_resources_test_data, - mock_config_entry_v2, + mock_bridge_v2: Mock, + v2_resources_test_data: JsonArrayType, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test enabling of the by default disabled zigbee_connectivity sensor.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -105,7 +110,7 @@ async def test_enable_sensor( assert state.attributes["mac_address"] == "00:17:88:01:0b:aa:bb:99" -async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_sensor_add_update(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test if sensors get added/updated from events.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) await setup_platform(hass, mock_bridge_v2, "sensor") diff --git a/tests/components/hue/test_services.py b/tests/components/hue/test_services.py index 6ce3cf2cc82..26a4cab8261 100644 --- a/tests/components/hue/test_services.py +++ b/tests/components/hue/test_services.py @@ -1,6 +1,6 @@ """Test Hue services.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from homeassistant.components import hue from homeassistant.components.hue import bridge @@ -48,7 +48,7 @@ SCENE_RESPONSE = { } -async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1) -> None: +async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1: Mock) -> None: """Test successful hue_activate_scene.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -83,7 +83,9 @@ async def test_hue_activate_scene(hass: HomeAssistant, mock_api_v1) -> None: assert mock_api_v1.mock_requests[2]["path"] == "groups/group_1/action" -async def test_hue_activate_scene_transition(hass: HomeAssistant, mock_api_v1) -> None: +async def test_hue_activate_scene_transition( + hass: HomeAssistant, mock_api_v1: Mock +) -> None: """Test successful hue_activate_scene with transition.""" config_entry = MockConfigEntry( domain=hue.DOMAIN, @@ -119,7 +121,7 @@ async def test_hue_activate_scene_transition(hass: HomeAssistant, mock_api_v1) - async def test_hue_activate_scene_group_not_found( - hass: HomeAssistant, mock_api_v1 + hass: HomeAssistant, mock_api_v1: Mock ) -> None: """Test failed hue_activate_scene due to missing group.""" config_entry = MockConfigEntry( @@ -151,7 +153,7 @@ async def test_hue_activate_scene_group_not_found( async def test_hue_activate_scene_scene_not_found( - hass: HomeAssistant, mock_api_v1 + hass: HomeAssistant, mock_api_v1: Mock ) -> None: """Test failed hue_activate_scene due to missing scene.""" config_entry = MockConfigEntry( @@ -184,10 +186,10 @@ async def test_hue_activate_scene_scene_not_found( async def test_hue_multi_bridge_activate_scene_all_respond( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes multiple bridges successfully activate a scene.""" await setup_component(hass) @@ -218,10 +220,10 @@ async def test_hue_multi_bridge_activate_scene_all_respond( async def test_hue_multi_bridge_activate_scene_one_responds( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - 
mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes only one bridge successfully activate a scene.""" await setup_component(hass) @@ -251,10 +253,10 @@ async def test_hue_multi_bridge_activate_scene_one_responds( async def test_hue_multi_bridge_activate_scene_zero_responds( hass: HomeAssistant, - mock_bridge_v1, - mock_bridge_v2, - mock_config_entry_v1, - mock_config_entry_v2, + mock_bridge_v1: Mock, + mock_bridge_v2: Mock, + mock_config_entry_v1: MockConfigEntry, + mock_config_entry_v2: MockConfigEntry, ) -> None: """Test that makes no bridge successfully activate a scene.""" await setup_component(hass) diff --git a/tests/components/hue/test_switch.py b/tests/components/hue/test_switch.py index 2e25dd715c1..478acbaa303 100644 --- a/tests/components/hue/test_switch.py +++ b/tests/components/hue/test_switch.py @@ -1,13 +1,16 @@ """Philips Hue switch platform tests for V2 bridge/api.""" +from unittest.mock import Mock + from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .conftest import setup_platform from .const import FAKE_BINARY_SENSOR, FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY async def test_switch( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test if (config) switches get created.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -34,7 +37,7 @@ async def test_switch( async def test_switch_turn_on_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn on service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -58,7 +61,7 @@ async def test_switch_turn_on_service( async def test_switch_turn_off_service( - hass: HomeAssistant, mock_bridge_v2, v2_resources_test_data + hass: HomeAssistant, mock_bridge_v2: Mock, v2_resources_test_data: JsonArrayType ) -> None: """Test calling the turn off service on a switch.""" await mock_bridge_v2.api.load_test_data(v2_resources_test_data) @@ -98,7 +101,7 @@ async def test_switch_turn_off_service( assert test_entity.state == "off" -async def test_switch_added(hass: HomeAssistant, mock_bridge_v2) -> None: +async def test_switch_added(hass: HomeAssistant, mock_bridge_v2: Mock) -> None: """Test new switch added to bridge.""" await mock_bridge_v2.api.load_test_data([FAKE_DEVICE, FAKE_ZIGBEE_CONNECTIVITY]) diff --git a/tests/components/humidifier/test_device_condition.py b/tests/components/humidifier/test_device_condition.py index 4f4d21adcba..ec8406bfe7b 100644 --- a/tests/components/humidifier/test_device_condition.py +++ b/tests/components/humidifier/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> 
list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("set_state", "features_reg", "features_state", "expected_condition_types"), [ @@ -153,7 +143,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -238,42 +228,42 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_mode - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_mode - event - test_event3" hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_HOME}) # Should not fire hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -316,15 +306,15 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON, {ATTR_MODE: const.MODE_AWAY}) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_mode - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_mode - event - test_event1" @pytest.mark.parametrize( diff --git a/tests/components/humidifier/test_device_trigger.py b/tests/components/humidifier/test_device_trigger.py index 83202e16675..3bb1f8c2551 100644 --- a/tests/components/humidifier/test_device_trigger.py +++ b/tests/components/humidifier/test_device_trigger.py @@ -30,7 +30,6 @@ from tests.common import ( MockConfigEntry, async_fire_time_changed, async_get_device_automations, - async_mock_service, ) @@ -39,12 +38,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -166,7 
+159,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -356,8 +349,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 35}, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "target_humidity_changed_below" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "target_humidity_changed_below" # Fake that the current humidity is changing hass.states.async_set( @@ -366,8 +359,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 7, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "current_humidity_changed_below" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "current_humidity_changed_below" # Fake that the humidity target is changing hass.states.async_set( @@ -376,8 +369,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 18}, ) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "target_humidity_changed_above" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "target_humidity_changed_above" # Fake that the current humidity is changing hass.states.async_set( @@ -386,14 +379,14 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "current_humidity_changed_above" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "current_humidity_changed_above" # Wait 6 minutes async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(minutes=6)) await hass.async_block_till_done() - assert len(calls) == 6 - assert {calls[4].data["some"], calls[5].data["some"]} == { + assert len(service_calls) == 6 + assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { "current_humidity_changed_above_for", "target_humidity_changed_above_for", } @@ -405,8 +398,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 8 - assert {calls[6].data["some"], calls[7].data["some"]} == { + assert len(service_calls) == 8 + assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { "turn_off device - humidifier.test_5678 - on - off - None", "turn_on_or_off device - humidifier.test_5678 - on - off - None", } @@ -418,8 +411,8 @@ async def test_if_fires_on_state_change( {const.ATTR_HUMIDITY: 37, const.ATTR_CURRENT_HUMIDITY: 41}, ) await hass.async_block_till_done() - assert len(calls) == 10 - assert {calls[8].data["some"], calls[9].data["some"]} == { + assert len(service_calls) == 10 + assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { "turn_on device - humidifier.test_5678 - off - on - None", "turn_on_or_off device - humidifier.test_5678 - off - on - None", } @@ -429,7 +422,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) 
-> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -479,12 +472,14 @@ async def test_if_fires_on_state_change_legacy( # Fake that the humidity is changing hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "target_humidity_changed_below" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "target_humidity_changed_below" async def test_invalid_config( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get_or_create(DOMAIN, "test", "5678") @@ -528,7 +523,7 @@ async def test_invalid_config( hass.states.async_set(entry.entity_id, STATE_ON, {const.ATTR_HUMIDITY: 7}) await hass.async_block_till_done() # Should not trigger for invalid config - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_get_trigger_capabilities_on(hass: HomeAssistant) -> None: diff --git a/tests/components/humidifier/test_init.py b/tests/components/humidifier/test_init.py index b90e7084dd1..b31750a3a3b 100644 --- a/tests/components/humidifier/test_init.py +++ b/tests/components/humidifier/test_init.py @@ -48,7 +48,7 @@ async def test_sync_turn_off(hass: HomeAssistant) -> None: assert humidifier.turn_off.called -def _create_tuples(enum: Enum, constant_prefix: str) -> list[tuple[Enum, str]]: +def _create_tuples(enum: type[Enum], constant_prefix: str) -> list[tuple[Enum, str]]: return [(enum_field, constant_prefix) for enum_field in enum] diff --git a/tests/components/hunterdouglas_powerview/conftest.py b/tests/components/hunterdouglas_powerview/conftest.py index da339914aac..d4433f93dcb 100644 --- a/tests/components/hunterdouglas_powerview/conftest.py +++ b/tests/components/hunterdouglas_powerview/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for Hunter Douglas Powerview tests.""" -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, PropertyMock, patch from aiopvapi.resources.shade import ShadePosition import pytest -from typing_extensions import Generator from homeassistant.components.hunterdouglas_powerview.const import DOMAIN @@ -29,7 +29,7 @@ def mock_hunterdouglas_hub( rooms_json: str, scenes_json: str, shades_json: str, -) -> Generator[MagicMock]: +) -> Generator[None]: """Return a mocked Powerview Hub with all data populated.""" with ( patch( diff --git a/tests/components/hunterdouglas_powerview/test_scene.py b/tests/components/hunterdouglas_powerview/test_scene.py index 9628805d0e8..43074d55470 100644 --- a/tests/components/hunterdouglas_powerview/test_scene.py +++ b/tests/components/hunterdouglas_powerview/test_scene.py @@ -14,10 +14,10 @@ from .const import MOCK_MAC from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_hunterdouglas_hub") @pytest.mark.parametrize("api_version", [1, 2, 3]) async def test_scenes( hass: HomeAssistant, - mock_hunterdouglas_hub: None, api_version: int, ) -> None: """Test the scenes.""" diff --git a/tests/components/husqvarna_automower/__init__.py b/tests/components/husqvarna_automower/__init__.py index 8c51d69ba3d..9473b68a5ed 100644 --- a/tests/components/husqvarna_automower/__init__.py +++ 
b/tests/components/husqvarna_automower/__init__.py @@ -7,6 +7,10 @@ from tests.common import MockConfigEntry async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Fixture for setting up the component.""" + # We lock the timezone, because the timezone is passed to the library to generate + # some values like the next start sensor. This is needed, as the device is not aware + # of its own timezone. So we assume the device is in the timezone which is selected in + # the Home Assistant config. + await hass.config.async_set_time_zone("Europe/Berlin") config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/conftest.py b/tests/components/husqvarna_automower/conftest.py index 7ace3b76808..dbb8f3b4c72 100644 --- a/tests/components/husqvarna_automower/conftest.py +++ b/tests/components/husqvarna_automower/conftest.py @@ -1,5 +1,6 @@ """Test helpers for Husqvarna Automower.""" +from collections.abc import Generator import time from unittest.mock import AsyncMock, patch @@ -7,7 +8,6 @@ from aioautomower.session import AutomowerSession, _MowerCommands from aioautomower.utils import mower_list_to_dictionary_dataclass from aiohttp import ClientWebSocketResponse import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index d8cd748c793..212be85ce51 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -7,18 +7,20 @@ 'calendar': dict({ 'events': list([ dict({ - 'end': '2024-03-02T00:00:00+00:00', + 'end': '2024-03-02T00:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', - 'start': '2024-03-01T19:00:00+00:00', + 'start': '2024-03-01T19:00:00', 'uid': '1140_300_MO,WE,FR', 'work_area_id': None, + 'work_area_name': None, }), dict({ - 'end': '2024-03-02T08:00:00+00:00', + 'end': '2024-03-02T08:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', - 'start': '2024-03-02T00:00:00+00:00', + 'start': '2024-03-02T00:00:00', 'uid': '0_480_TU,TH,SA', 'work_area_id': None, + 'work_area_name': None, }), ]), 'tasks': list([ @@ -33,6 +35,7 @@ 'tuesday': False, 'wednesday': True, 'work_area_id': None, + 'work_area_name': None, }), dict({ 'duration': 480, @@ -45,6 +48,7 @@ 'tuesday': True, 'wednesday': False, 'work_area_id': None, + 'work_area_name': None, }), ]), }), @@ -61,17 +65,18 @@ 'mower': dict({ 'activity': 'PARKED_IN_CS', 'error_code': 0, - 'error_datetime': None, 'error_datetime_naive': None, 'error_key': None, + 'error_timestamp': 0, 'inactive_reason': 'NONE', 'is_error_confirmable': False, 'mode': 'MAIN_AREA', 'state': 'RESTRICTED', 'work_area_id': 123456, + 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start_datetime': '2023-06-05T19:00:00+00:00', + 'next_start': 1685991600000, 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'NOT_ACTIVE', @@ -113,6 +118,17 @@ 'name': 'Test Mower 1', 'serial_number': 123, }), + 'work_area_dict': dict({ + '0': 'my_lawn', + '123456': 'Front lawn', + '654321': 'Back lawn', + }), + 'work_area_names': list([ + 'Front lawn', + 'Back lawn', + 'my_lawn', + 'no_work_area_active', + ]), 'work_areas': dict({ '0': dict({ 'cutting_height': 50, diff --git 
a/tests/components/husqvarna_automower/snapshots/test_init.ambr b/tests/components/husqvarna_automower/snapshots/test_init.ambr index efe1eb8bd51..ccfb1bf3df4 100644 --- a/tests/components/husqvarna_automower/snapshots/test_init.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Husqvarna', 'model': '450XH-TEST', + 'model_id': None, 'name': 'Test Mower 1', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 935303e48fb..c727a49b71a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -548,7 +548,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-05T19:00:00+00:00', + 'state': '2023-06-05T17:00:00+00:00', }) # --- # name: test_sensor_snapshot[sensor.test_mower_1_none-entry] @@ -1100,6 +1100,11 @@ 'my_lawn', 'no_work_area_active', ]), + 'work_area_id_assignment': dict({ + 0: 'my_lawn', + 123456: 'Front lawn', + 654321: 'Back lawn', + }), }), 'context': , 'entity_id': 'sensor.test_mower_1_work_area', diff --git a/tests/components/husqvarna_automower/test_diagnostics.py b/tests/components/husqvarna_automower/test_diagnostics.py index eeb6b46e6c4..3166b09f1ee 100644 --- a/tests/components/husqvarna_automower/test_diagnostics.py +++ b/tests/components/husqvarna_automower/test_diagnostics.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,7 +37,7 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) @pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 5d5cacfc6bf..2ae427e0e1e 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -13,7 +13,7 @@ from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.components.lawn_mower import LawnMowerActivity from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from . 
import setup_integration from .const import TEST_MOWER_ID @@ -122,7 +122,7 @@ async def test_lawn_mower_commands( async def test_lawn_mower_service_commands( hass: HomeAssistant, aioautomower_command: str, - extra_data: int | None, + extra_data: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, @@ -158,27 +158,112 @@ async def test_lawn_mower_service_commands( @pytest.mark.parametrize( - ("service", "service_data"), + ("aioautomower_command", "extra_data1", "extra_data2", "service", "service_data"), [ ( - "override_schedule", + "start_in_workarea", + 123456, + timedelta(days=40), + "override_schedule_work_area", { - "duration": {"days": 1, "hours": 12, "minutes": 30}, - "override_mode": "fly_to_moon", + "work_area_id": 123456, + "duration": {"days": 40}, }, ), ], ) -async def test_lawn_mower_wrong_service_commands( +async def test_lawn_mower_override_work_area_command( hass: HomeAssistant, + aioautomower_command: str, + extra_data1: int, + extra_data2: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test lawn_mower commands.""" + """Test lawn_mower work area override commands.""" await setup_integration(hass, mock_config_entry) - with pytest.raises(MultipleInvalid): + mocked_method = AsyncMock() + setattr(mock_automower_client.commands, aioautomower_command, mocked_method) + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + mocked_method.assert_called_once_with(TEST_MOWER_ID, extra_data1, extra_data2) + + getattr( + mock_automower_client.commands, aioautomower_command + ).side_effect = ApiException("Test error") + with pytest.raises( + HomeAssistantError, + match="Failed to send command: Test error", + ): + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "mower_support_wa", "exception"), + [ + ( + "override_schedule", + { + "duration": {"days": 1, "hours": 12, "minutes": 30}, + "override_mode": "fly_to_moon", + }, + False, + MultipleInvalid, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 123456, + "duration": {"days": 40}, + }, + False, + ServiceValidationError, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 12345, + "duration": {"days": 40}, + }, + True, + ServiceValidationError, + ), + ], +) +async def test_lawn_mower_wrong_service_commands( + hass: HomeAssistant, + service: str, + service_data: dict[str, int] | None, + mower_support_wa: bool, + exception, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lawn_mower commands.""" + await setup_integration(hass, mock_config_entry) + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + with pytest.raises(exception): await hass.services.async_call( domain=DOMAIN, service=service, diff --git a/tests/components/husqvarna_automower/test_number.py b/tests/components/husqvarna_automower/test_number.py index 
0547d6a9b2e..9f2f8793bba 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -1,13 +1,18 @@ """Tests for number platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException from aioautomower.utils import mower_list_to_dictionary_dataclass +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -16,7 +21,12 @@ from homeassistant.helpers import entity_registry as er from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, load_json_value_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -41,7 +51,7 @@ async def test_number_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="number", @@ -57,6 +67,7 @@ async def test_number_workarea_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" @@ -75,8 +86,11 @@ async def test_number_workarea_commands( service="set_value", target={"entity_id": entity_id}, service_data={"value": "75"}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) state = hass.states.get(entity_id) assert state.state is not None @@ -85,7 +99,7 @@ async def test_number_workarea_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="number", diff --git a/tests/components/husqvarna_automower/test_select.py b/tests/components/husqvarna_automower/test_select.py index 2728bb5e672..e885a4d3487 100644 --- a/tests/components/husqvarna_automower/test_select.py +++ b/tests/components/husqvarna_automower/test_select.py @@ -88,7 +88,7 @@ async def test_select_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="select", diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 314bcaaa00c..1a4f545ac96 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -73,12 +73,12 @@ async def test_next_start_sensor( await setup_integration(hass, mock_config_entry) state = 
hass.states.get("sensor.test_mower_1_next_start") assert state is not None - assert state.state == "2023-06-05T19:00:00+00:00" + assert state.state == "2023-06-05T17:00:00+00:00" values = mower_list_to_dictionary_dataclass( load_json_value_fixture("mower.json", DOMAIN) ) - values[TEST_MOWER_ID].planner.next_start_datetime = None + values[TEST_MOWER_ID].planner.next_start_datetime_naive = None mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 08450158876..5b4e465e253 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -1,5 +1,6 @@ """Tests for switch platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException @@ -9,7 +10,10 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -83,7 +87,7 @@ async def test_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="switch", @@ -109,6 +113,7 @@ async def test_stay_out_zone_switch_commands( excepted_state: str, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" @@ -124,8 +129,11 @@ async def test_stay_out_zone_switch_commands( domain="switch", service=service, service_data={"entity_id": entity_id}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, TEST_ZONE_ID, boolean) state = hass.states.get(entity_id) assert state is not None @@ -134,7 +142,7 @@ async def test_stay_out_zone_switch_commands( mocked_method.side_effect = ApiException("Test error") with pytest.raises( HomeAssistantError, - match="Command couldn't be sent to the command queue: Test error", + match="Failed to send command: Test error", ): await hass.services.async_call( domain="switch", diff --git a/tests/components/hydrawise/snapshots/test_valve.ambr b/tests/components/hydrawise/snapshots/test_valve.ambr new file mode 100644 index 00000000000..cac08893324 --- /dev/null +++ b/tests/components/hydrawise/snapshots/test_valve.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_all_valves[valve.zone_one-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.zone_one', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': None, + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '5965394_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_valves[valve.zone_one-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'water', + 'friendly_name': 'Zone One', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.zone_one', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_all_valves[valve.zone_two-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'valve', + 'entity_category': None, + 'entity_id': 'valve.zone_two', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'hydrawise', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '5965395_zone', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_valves[valve.zone_two-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by hydrawise.com', + 'device_class': 'water', + 'friendly_name': 'Zone Two', + 'supported_features': , + }), + 'context': , + 'entity_id': 'valve.zone_two', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/hydrawise/test_services.py b/tests/components/hydrawise/test_services.py new file mode 100644 index 00000000000..f61a6786270 --- /dev/null +++ b/tests/components/hydrawise/test_services.py @@ -0,0 +1,93 @@ +"""Test Hydrawise services.""" + +from datetime import datetime +from unittest.mock import AsyncMock + +from pydrawise.schema import Zone + +from homeassistant.components.hydrawise.const import ( + ATTR_DURATION, + ATTR_UNTIL, + DOMAIN, + SERVICE_RESUME, + SERVICE_START_WATERING, + SERVICE_SUSPEND, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_start_watering( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the start_watering service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_START_WATERING, + { + ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", + ATTR_DURATION: 20, + }, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with( + zones[0], custom_run_duration=20 * 60 + ) + + +async def test_start_watering_no_duration( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the start_watering service works with no duration specified.""" + await hass.services.async_call( + DOMAIN, + SERVICE_START_WATERING, + {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with(zones[0], custom_run_duration=0) + + +async def test_resume( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the resume 
service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_RESUME, + {ATTR_ENTITY_ID: "binary_sensor.zone_one_watering"}, + blocking=True, + ) + mock_pydrawise.resume_zone.assert_called_once_with(zones[0]) + + +async def test_suspend( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test that the suspend service works as intended.""" + await hass.services.async_call( + DOMAIN, + SERVICE_SUSPEND, + { + ATTR_ENTITY_ID: "binary_sensor.zone_one_watering", + ATTR_UNTIL: datetime(2026, 1, 1, 0, 0, 0), + }, + blocking=True, + ) + mock_pydrawise.suspend_zone.assert_called_once_with( + zones[0], until=datetime(2026, 1, 1, 0, 0, 0) + ) diff --git a/tests/components/hydrawise/test_valve.py b/tests/components/hydrawise/test_valve.py new file mode 100644 index 00000000000..918fae00017 --- /dev/null +++ b/tests/components/hydrawise/test_valve.py @@ -0,0 +1,59 @@ +"""Test Hydrawise valve.""" + +from collections.abc import Awaitable, Callable +from unittest.mock import AsyncMock, patch + +from pydrawise.schema import Zone +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.valve import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_CLOSE_VALVE, + SERVICE_OPEN_VALVE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_valves( + hass: HomeAssistant, + mock_add_config_entry: Callable[[], Awaitable[MockConfigEntry]], + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test that all valves are working.""" + with patch( + "homeassistant.components.hydrawise.PLATFORMS", + [Platform.VALVE], + ): + config_entry = await mock_add_config_entry() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +async def test_services( + hass: HomeAssistant, + mock_added_config_entry: MockConfigEntry, + mock_pydrawise: AsyncMock, + zones: list[Zone], +) -> None: + """Test valve services.""" + await hass.services.async_call( + DOMAIN, + SERVICE_OPEN_VALVE, + service_data={ATTR_ENTITY_ID: "valve.zone_one"}, + blocking=True, + ) + mock_pydrawise.start_zone.assert_called_once_with(zones[0]) + mock_pydrawise.reset_mock() + + await hass.services.async_call( + DOMAIN, + SERVICE_CLOSE_VALVE, + service_data={ATTR_ENTITY_ID: "valve.zone_one"}, + blocking=True, + ) + mock_pydrawise.stop_zone.assert_called_once_with(zones[0]) diff --git a/tests/components/idasen_desk/conftest.py b/tests/components/idasen_desk/conftest.py index 91f3f2de40e..24ef8311445 100644 --- a/tests/components/idasen_desk/conftest.py +++ b/tests/components/idasen_desk/conftest.py @@ -1,11 +1,10 @@ """IKEA Idasen Desk fixtures.""" -from collections.abc import Callable +from collections.abc import Callable, Generator from unittest import mock from unittest.mock import AsyncMock, MagicMock import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/image/conftest.py b/tests/components/image/conftest.py index 65bbf2e0c4f..8bb5d19b6db 100644 --- a/tests/components/image/conftest.py +++ b/tests/components/image/conftest.py @@ -1,7 +1,8 @@ """Test helpers for image.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.components import image from homeassistant.config_entries import 
ConfigEntry, ConfigFlow diff --git a/tests/components/image/test_media_source.py b/tests/components/image/test_media_source.py index 2037641a1a3..73cc76b9fb7 100644 --- a/tests/components/image/test_media_source.py +++ b/tests/components/image/test_media_source.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_media_source(hass): +async def setup_media_source(hass: HomeAssistant) -> None: """Set up media source.""" assert await async_setup_component(hass, "media_source", {}) diff --git a/tests/components/imap/conftest.py b/tests/components/imap/conftest.py index 354c9fbe24e..87663031e7a 100644 --- a/tests/components/imap/conftest.py +++ b/tests/components/imap/conftest.py @@ -1,10 +1,10 @@ """Fixtures for imap tests.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from aioimaplib import AUTH, LOGOUT, NONAUTH, SELECTED, STARTED, Response import pytest -from typing_extensions import AsyncGenerator, Generator from .const import EMPTY_SEARCH_RESPONSE, TEST_FETCH_RESPONSE_TEXT_PLAIN diff --git a/tests/components/imgw_pib/conftest.py b/tests/components/imgw_pib/conftest.py index 1d278856b5b..6f23ed3ee80 100644 --- a/tests/components/imgw_pib/conftest.py +++ b/tests/components/imgw_pib/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the IMGW-PIB tests.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch from imgw_pib import HydrologicalData, SensorData import pytest -from typing_extensions import Generator from homeassistant.components.imgw_pib.const import DOMAIN diff --git a/tests/components/imgw_pib/test_diagnostics.py b/tests/components/imgw_pib/test_diagnostics.py index 62dabc982c4..14d4e7a5224 100644 --- a/tests/components/imgw_pib/test_diagnostics.py +++ b/tests/components/imgw_pib/test_diagnostics.py @@ -28,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/improv_ble/test_config_flow.py b/tests/components/improv_ble/test_config_flow.py index 53da1f28425..640a931bee5 100644 --- a/tests/components/improv_ble/test_config_flow.py +++ b/tests/components/improv_ble/test_config_flow.py @@ -543,7 +543,7 @@ async def test_authorize_fails(hass: HomeAssistant, exc, error) -> None: assert result["reason"] == error -async def _test_provision_error(hass: HomeAssistant, exc) -> None: +async def _test_provision_error(hass: HomeAssistant, exc) -> str: """Test bluetooth flow with error.""" result = await hass.config_entries.flow.async_init( DOMAIN, diff --git a/tests/components/incomfort/conftest.py b/tests/components/incomfort/conftest.py index 64885e38b65..f17547a1445 100644 --- a/tests/components/incomfort/conftest.py +++ b/tests/components/incomfort/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Intergas InComfort integration.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from incomfortclient import DisplayCode import pytest -from typing_extensions import Generator from homeassistant.components.incomfort import DOMAIN from homeassistant.config_entries import ConfigEntry @@ -77,10 +77,9 @@ def mock_room_status() -> dict[str, Any]: @pytest.fixture def mock_incomfort( - hass: HomeAssistant, mock_heater_status: dict[str, Any], 
mock_room_status: dict[str, Any], -) -> Generator[MagicMock, None]: +) -> Generator[MagicMock]: """Mock the InComfort gateway client.""" class MockRoom: diff --git a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index aba153cf8a8..e9592a06fe2 100644 --- a/tests/components/influxdb/test_init.py +++ b/tests/components/influxdb/test_init.py @@ -1,5 +1,6 @@ """The tests for the InfluxDB component.""" +from collections.abc import Generator from dataclasses import dataclass import datetime from http import HTTPStatus @@ -7,7 +8,6 @@ import logging from unittest.mock import ANY, MagicMock, Mock, call, patch import pytest -from typing_extensions import Generator from homeassistant.components import influxdb from homeassistant.components.influxdb.const import DEFAULT_BUCKET @@ -43,7 +43,7 @@ class FilterTest: @pytest.fixture(autouse=True) -def mock_batch_timeout(hass, monkeypatch): +def mock_batch_timeout(monkeypatch: pytest.MonkeyPatch) -> None: """Mock the event bus listener and the batch timeout for tests.""" monkeypatch.setattr( f"{INFLUX_PATH}.InfluxThread.batch_timeout", @@ -79,7 +79,6 @@ def get_mock_call_fixture(request: pytest.FixtureRequest): if request.param == influxdb.API_VERSION_2: return lambda body, precision=None: v2_call(body, precision) - # pylint: disable-next=unnecessary-lambda return lambda body, precision=None: call(body, time_precision=precision) diff --git a/tests/components/influxdb/test_sensor.py b/tests/components/influxdb/test_sensor.py index 48cae2a3ae6..73dd8375a00 100644 --- a/tests/components/influxdb/test_sensor.py +++ b/tests/components/influxdb/test_sensor.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import dataclass from datetime import timedelta from http import HTTPStatus @@ -10,7 +11,6 @@ from unittest.mock import MagicMock, patch from influxdb.exceptions import InfluxDBClientError, InfluxDBServerError from influxdb_client.rest import ApiException import pytest -from typing_extensions import Generator from voluptuous import Invalid from homeassistant.components import sensor diff --git a/tests/components/insteon/test_api_scenes.py b/tests/components/insteon/test_api_scenes.py index 1b8d4d50f08..14001e0495d 100644 --- a/tests/components/insteon/test_api_scenes.py +++ b/tests/components/insteon/test_api_scenes.py @@ -1,7 +1,8 @@ """Test the Insteon Scenes APIs.""" -import json +from collections.abc import Generator import os +from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.constants import ResponseStatus @@ -11,21 +12,22 @@ import pytest from homeassistant.components.insteon.api import async_load_api, scenes from homeassistant.components.insteon.const import ID, TYPE from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonArrayType from .mock_devices import MockDevices -from tests.common import load_fixture -from tests.typing import WebSocketGenerator +from tests.common import load_json_array_fixture +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="scene_data", scope="module") -def aldb_data_fixture(): +def aldb_data_fixture() -> JsonArrayType: """Load the controller state fixture data.""" - return json.loads(load_fixture("insteon/scene_data.json")) + return load_json_array_fixture("insteon/scene_data.json") @pytest.fixture(name="remove_json") -def remove_insteon_devices_json(hass): +def remove_insteon_devices_json(hass: HomeAssistant) -> Generator[None]: 
"""Fixture to remove insteon_devices.json at the end of the test.""" yield file = os.path.join(hass.config.config_dir, "insteon_devices.json") @@ -33,7 +35,7 @@ def remove_insteon_devices_json(hass): os.remove(file) -def _scene_to_array(scene): +def _scene_to_array(scene: dict[str, Any]) -> list[dict[str, Any]]: """Convert a scene object to a dictionary.""" scene_list = [] for device, links in scene["devices"].items(): @@ -47,7 +49,9 @@ def _scene_to_array(scene): return scene_list -async def _setup(hass, hass_ws_client, scene_data): +async def _setup( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() @@ -63,7 +67,7 @@ async def _setup(hass, hass_ws_client, scene_data): # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scenes( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType ) -> None: """Test getting all Insteon scenes.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -79,7 +83,7 @@ async def test_get_scenes( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_get_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data: JsonArrayType ) -> None: """Test getting an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -93,8 +97,11 @@ async def test_get_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -125,8 +132,11 @@ async def test_save_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_new_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving a new Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -157,8 +167,11 @@ async def test_save_new_scene( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async def test_save_scene_error( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test saving an Insteon scene with error.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) @@ -189,8 +202,11 @@ async def test_save_scene_error( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.usefixtures("remove_json") async 
def test_delete_scene( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + scene_data: JsonArrayType, ) -> None: """Test delete an Insteon scene.""" ws_client, devices = await _setup(hass, hass_ws_client, scene_data) diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index 1aae4fb6dd6..cf1e085c10f 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,10 +1,10 @@ """Fixtures for IntelliFire integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiohttp.client_reqrep import ConnectionKey import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 5f4c7b97b63..86f3a7aba46 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -3,11 +3,11 @@ from unittest.mock import patch from homeassistant import config as hass_config -from homeassistant.bootstrap import async_setup_component from homeassistant.components.intent_script import DOMAIN from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import intent +from homeassistant.setup import async_setup_component from tests.common import async_mock_service, get_fixture_path diff --git a/tests/components/ios/test_init.py b/tests/components/ios/test_init.py index afefec1530c..ddf5835a1be 100644 --- a/tests/components/ios/test_init.py +++ b/tests/components/ios/test_init.py @@ -19,7 +19,7 @@ def mock_load_json(): @pytest.fixture(autouse=True) -def mock_dependencies(hass): +def mock_dependencies(hass: HomeAssistant) -> None: """Mock dependencies loaded.""" mock_component(hass, "zeroconf") mock_component(hass, "device_tracker") diff --git a/tests/components/iotawatt/conftest.py b/tests/components/iotawatt/conftest.py index f3a60e69021..9380154b53e 100644 --- a/tests/components/iotawatt/conftest.py +++ b/tests/components/iotawatt/conftest.py @@ -1,16 +1,18 @@ """Test fixtures for IoTaWatt.""" -from unittest.mock import AsyncMock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch import pytest from homeassistant.components.iotawatt import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def entry(hass): +def entry(hass: HomeAssistant) -> MockConfigEntry: """Mock config entry added to HA.""" entry = MockConfigEntry(domain=DOMAIN, data={"host": "1.2.3.4"}) entry.add_to_hass(hass) @@ -18,7 +20,7 @@ def entry(hass): @pytest.fixture -def mock_iotawatt(entry): +def mock_iotawatt(entry: MockConfigEntry) -> Generator[MagicMock]: """Mock iotawatt.""" with patch("homeassistant.components.iotawatt.coordinator.Iotawatt") as mock: instance = mock.return_value diff --git a/tests/components/iotawatt/test_init.py b/tests/components/iotawatt/test_init.py index 8b707780eb4..de3a2f9f829 100644 --- a/tests/components/iotawatt/test_init.py +++ b/tests/components/iotawatt/test_init.py @@ -1,5 +1,7 @@ """Test init.""" +from unittest.mock import MagicMock + import httpx from homeassistant.config_entries import ConfigEntryState @@ -8,8 +10,12 @@ from homeassistant.setup import async_setup_component from . 
import INPUT_SENSOR +from tests.common import MockConfigEntry -async def test_setup_unload(hass: HomeAssistant, mock_iotawatt, entry) -> None: + +async def test_setup_unload( + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry +) -> None: """Test we can setup and unload an entry.""" mock_iotawatt.getSensors.return_value["sensors"]["my_sensor_key"] = INPUT_SENSOR assert await async_setup_component(hass, "iotawatt", {}) @@ -18,7 +24,7 @@ async def test_setup_unload(hass: HomeAssistant, mock_iotawatt, entry) -> None: async def test_setup_connection_failed( - hass: HomeAssistant, mock_iotawatt, entry + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry ) -> None: """Test connection error during startup.""" mock_iotawatt.connect.side_effect = httpx.ConnectError("") @@ -27,7 +33,9 @@ async def test_setup_connection_failed( assert entry.state is ConfigEntryState.SETUP_RETRY -async def test_setup_auth_failed(hass: HomeAssistant, mock_iotawatt, entry) -> None: +async def test_setup_auth_failed( + hass: HomeAssistant, mock_iotawatt: MagicMock, entry: MockConfigEntry +) -> None: """Test auth error during startup.""" mock_iotawatt.connect.return_value = False assert await async_setup_component(hass, "iotawatt", {}) diff --git a/tests/components/iotawatt/test_sensor.py b/tests/components/iotawatt/test_sensor.py index ecf2f97c67a..eb1a240a82f 100644 --- a/tests/components/iotawatt/test_sensor.py +++ b/tests/components/iotawatt/test_sensor.py @@ -1,6 +1,7 @@ """Test setting up sensors.""" from datetime import timedelta +from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -25,7 +26,7 @@ from tests.common import async_fire_time_changed async def test_sensor_type_input( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock ) -> None: """Test input sensors work.""" assert await async_setup_component(hass, "iotawatt", {}) @@ -60,7 +61,7 @@ async def test_sensor_type_input( async def test_sensor_type_output( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt + hass: HomeAssistant, freezer: FrozenDateTimeFactory, mock_iotawatt: MagicMock ) -> None: """Tests the sensor type of Output.""" mock_iotawatt.getSensors.return_value["sensors"]["my_watthour_sensor_key"] = ( diff --git a/tests/components/iotty/__init__.py b/tests/components/iotty/__init__.py new file mode 100644 index 00000000000..705b8218c8b --- /dev/null +++ b/tests/components/iotty/__init__.py @@ -0,0 +1 @@ +"""Tests for iotty.""" diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py new file mode 100644 index 00000000000..7961a4ce3a1 --- /dev/null +++ b/tests/components/iotty/conftest.py @@ -0,0 +1,180 @@ +"""Fixtures for iotty integration tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from aiohttp import ClientSession +from iottycloud.device import Device +from iottycloud.lightswitch import LightSwitch +from iottycloud.verbs import LS_DEVICE_TYPE_UID, RESULT, STATUS, STATUS_OFF, STATUS_ON +import pytest + +from homeassistant import setup +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker, 
mock_aiohttp_client
+
+CLIENT_ID = "client_id"
+CLIENT_SECRET = "client_secret"
+REDIRECT_URI = "https://example.com/auth/external/callback"
+
+test_devices = [
+    Device("TestDevice0", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Device Name 0"),
+    Device("TestDevice1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Device Name 1"),
+]
+
+
+ls_0 = LightSwitch(
+    "TestLS", "TEST_SERIAL_0", LS_DEVICE_TYPE_UID, "[TEST] Light switch 0"
+)
+ls_1 = LightSwitch(
+    "TestLS1", "TEST_SERIAL_1", LS_DEVICE_TYPE_UID, "[TEST] Light switch 1"
+)
+ls_2 = LightSwitch(
+    "TestLS2", "TEST_SERIAL_2", LS_DEVICE_TYPE_UID, "[TEST] Light switch 2"
+)
+
+test_ls = [ls_0, ls_1]
+
+test_ls_one_removed = [ls_0]
+
+test_ls_one_added = [
+    ls_0,
+    ls_1,
+    ls_2,
+]
+
+
+@pytest.fixture
+async def local_oauth_impl(hass: HomeAssistant):
+    """Local implementation."""
+    assert await setup.async_setup_component(hass, "auth", {})
+    return config_entry_oauth2_flow.LocalOAuth2Implementation(
+        hass, DOMAIN, "client_id", "client_secret", "authorize_url", "https://token.url"
+    )
+
+
+@pytest.fixture
+def aiohttp_client_session() -> None:
+    """AIOHTTP client session."""
+    return ClientSession
+
+
+@pytest.fixture
+def mock_aioclient() -> Generator[AiohttpClientMocker, None, None]:
+    """Fixture to mock aioclient calls."""
+    with mock_aiohttp_client() as mock_session:
+        yield mock_session
+
+
+@pytest.fixture
+def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
+    """Return the default mocked config entry."""
+    return MockConfigEntry(
+        title="IOTTY00001",
+        domain=DOMAIN,
+        data={
+            "auth_implementation": DOMAIN,
+            "token": {
+                "refresh_token": "REFRESH_TOKEN",
+                "access_token": "ACCESS_TOKEN_1",
+                "expires_in": 10,
+                "expires_at": 0,
+                "token_type": "bearer",
+                "random_other_data": "should_stay",
+            },
+            CONF_HOST: "127.0.0.1",
+            CONF_MAC: "AA:BB:CC:DD:EE:FF",
+            CONF_PORT: 9123,
+        },
+        unique_id="IOTTY00001",
+    )
+
+
+@pytest.fixture
+def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock, None, None]:
+    """Mock async_forward_entry_setup."""
+    with patch(
+        "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups"
+    ) as mock_fn:
+        yield mock_fn
+
+
+@pytest.fixture
+def mock_setup_entry() -> Generator[AsyncMock, None, None]:
+    """Mock setting up a config entry."""
+    with patch(
+        "homeassistant.components.iotty.async_setup_entry", return_value=True
+    ) as mock_setup:
+        yield mock_setup
+
+
+@pytest.fixture
+def mock_iotty() -> Generator[None, MagicMock, None]:
+    """Mock IottyProxy."""
+    with patch(
+        "homeassistant.components.iotty.api.IottyProxy", autospec=True
+    ) as iotty_mock:
+        yield iotty_mock
+
+
+@pytest.fixture
+def mock_coordinator() -> Generator[None, MagicMock, None]:
+    """Mock IottyDataUpdateCoordinator."""
+    with patch(
+        "homeassistant.components.iotty.coordinator.IottyDataUpdateCoordinator",
+        autospec=True,
+    ) as coordinator_mock:
+        yield coordinator_mock
+
+
+@pytest.fixture
+def mock_get_devices_nodevices() -> Generator[AsyncMock, None, None]:
+    """Mock for get_devices, returning no devices."""
+
+    with patch("iottycloud.cloudapi.CloudApi.get_devices") as mock_fn:
+        yield mock_fn
+
+
+@pytest.fixture
+def mock_get_devices_twolightswitches() -> Generator[AsyncMock, None, None]:
+    """Mock for get_devices, returning two light switches."""
+
+    with patch(
+        "iottycloud.cloudapi.CloudApi.get_devices", return_value=test_ls
+    ) as mock_fn:
+        yield mock_fn
+
+
+@pytest.fixture
+def mock_command_fn() -> Generator[AsyncMock, None, None]:
+    """Mock for command."""
+
+    with
patch("iottycloud.cloudapi.CloudApi.command", return_value=None) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled_off() -> Generator[AsyncMock, None, None]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_OFF}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn + + +@pytest.fixture +def mock_get_status_filled() -> Generator[AsyncMock, None, None]: + """Mock setting up a get_status.""" + + retval = {RESULT: {STATUS: STATUS_ON}} + with patch( + "iottycloud.cloudapi.CloudApi.get_status", return_value=retval + ) as mock_fn: + yield mock_fn diff --git a/tests/components/iotty/snapshots/test_switch.ambr b/tests/components/iotty/snapshots/test_switch.ambr new file mode 100644 index 00000000000..8ec22ed162a --- /dev/null +++ b/tests/components/iotty/snapshots/test_switch.ambr @@ -0,0 +1,126 @@ +# serializer version: 1 +# name: test_api_not_ok_entities_stay_the_same_as_before + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_api_throws_response_entities_stay_the_same_as_before + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_creaction_ok[device] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'iotty', + 'TestLS', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'iotty', + 'model': None, + 'model_id': None, + 'name': '[TEST] Light switch 0 (TEST_SERIAL_0)', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices_creaction_ok[entity-ids] + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_creaction_ok[entity] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.test_light_switch_0_test_serial_0', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'iotty', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'TestLS', + 'unit_of_measurement': None, + }) +# --- +# name: test_devices_creaction_ok[state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': '[TEST] Light switch 0 (TEST_SERIAL_0)', + }), + 'context': , + 'entity_id': 'switch.test_light_switch_0_test_serial_0', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_devices_deletion_ok + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + ]) +# --- +# name: test_devices_deletion_ok.1 + list([ + 'switch.test_light_switch_0_test_serial_0', + ]) +# --- +# name: test_devices_insertion_ok + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + 
]) +# --- +# name: test_devices_insertion_ok.1 + list([ + 'switch.test_light_switch_0_test_serial_0', + 'switch.test_light_switch_1_test_serial_1', + 'switch.test_light_switch_2_test_serial_2', + ]) +# --- +# name: test_setup_entry_ok_nodevices + list([ + ]) +# --- diff --git a/tests/components/iotty/test_api.py b/tests/components/iotty/test_api.py new file mode 100644 index 00000000000..6bb396f5d4d --- /dev/null +++ b/tests/components/iotty/test_api.py @@ -0,0 +1,82 @@ +"""Unit tests for iottycloud API.""" + +from unittest.mock import patch + +from aiohttp import ClientSession +import pytest + +from homeassistant.components.iotty import api +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + + +async def test_api_create_fail( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test API creation with no session.""" + + with pytest.raises(ValueError, match="websession"): + api.IottyProxy(hass, None, None) + + with pytest.raises(ValueError, match="oauth_session"): + api.IottyProxy(hass, aioclient_mock, None) + + +async def test_api_create_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + aiohttp_client_session: None, + local_oauth_impl: ClientSession, +) -> None: + """Test API creation. We're checking that we can create an IottyProxy without raising.""" + + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + api.IottyProxy(hass, aiohttp_client_session, local_oauth_impl) + + +@patch( + "homeassistant.helpers.config_entry_oauth2_flow.OAuth2Session.valid_token", False +) +async def test_api_getaccesstoken_tokennotvalid_reloadtoken( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_aioclient: None, + aiohttp_client_session: ClientSession, +) -> None: + """Test getting access token. + + If a request with an invalid token is made, a request for a new token is done, + and the resulting token is used for future calls. 
+ """ + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + new_token = "ACCESS_TOKEN_1" + + mock_aioclient.post( + "https://token.url", json={"access_token": new_token, "expires_in": 100} + ) + + mock_aioclient.post("https://example.com", status=201) + + mock_config_entry.add_to_hass(hass) + oauth2_session = config_entry_oauth2_flow.OAuth2Session( + hass, mock_config_entry, local_oauth_impl + ) + + iotty = api.IottyProxy(hass, aiohttp_client_session, oauth2_session) + + tok = await iotty.async_get_access_token() + assert tok == new_token diff --git a/tests/components/iotty/test_config_flow.py b/tests/components/iotty/test_config_flow.py new file mode 100644 index 00000000000..83fa16ece56 --- /dev/null +++ b/tests/components/iotty/test_config_flow.py @@ -0,0 +1,102 @@ +"""Test the iotty config flow.""" + +from http import HTTPStatus +from unittest.mock import AsyncMock, MagicMock + +import multidict +import pytest + +from homeassistant import config_entries +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.iotty.application_credentials import OAUTH2_TOKEN +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component + +from .conftest import CLIENT_ID, CLIENT_SECRET, REDIRECT_URI + +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +@pytest.fixture +async def setup_credentials(hass: HomeAssistant) -> None: + """Fixture to setup application credentials component.""" + await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, CLIENT_SECRET), + ) + + +@pytest.fixture +def current_request_with_host(current_request: MagicMock) -> None: + """Mock current request with a host header.""" + new_headers = multidict.CIMultiDict(current_request.get.return_value.headers) + new_headers[config_entry_oauth2_flow.HEADER_FRONTEND_BASE] = "https://example.com" + current_request.get.return_value = current_request.get.return_value.clone( + headers=new_headers + ) + + +async def test_config_flow_no_credentials(hass: HomeAssistant) -> None: + """Test config flow base case with no credentials registered.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result.get("type") == FlowResultType.ABORT + assert result.get("reason") == "missing_credentials" + + +@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_setup_entry: AsyncMock, +) -> None: + """Check full flow.""" + + await async_import_client_credential( + hass, DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER, "entry_id": DOMAIN} + ) + + assert result.get("type") == FlowResultType.EXTERNAL_STEP + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT_URI, + }, + ) + + client = await hass_client_no_auth() + resp = await 
client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == HTTPStatus.OK + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/iotty/test_init.py b/tests/components/iotty/test_init.py new file mode 100644 index 00000000000..ee8168fdf2f --- /dev/null +++ b/tests/components/iotty/test_init.py @@ -0,0 +1,73 @@ +"""Tests for the iotty integration.""" + +from unittest.mock import MagicMock + +from homeassistant.components.iotty.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry + + +async def test_load_unload_coordinator_called( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_coordinator: MagicMock, + local_oauth_impl, +) -> None: + """Test the configuration entry loading/unloading.""" + + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + await hass.async_block_till_done() + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + mock_coordinator.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.LOADED + method_call = mock_coordinator.method_calls[0] + name, _, _ = method_call + assert name == "().async_config_entry_first_refresh" + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_load_unload_iottyproxy_called( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_iotty: MagicMock, + local_oauth_impl, + mock_config_entries_async_forward_entry_setup, +) -> None: + """Test the configuration entry loading/unloading.""" + + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data["auth_implementation"] is not None + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + mock_iotty.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.LOADED + method_call = mock_iotty.method_calls[0] + name, _, _ = method_call + assert name == "().get_devices" + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/iotty/test_switch.py b/tests/components/iotty/test_switch.py new file mode 100644 index 00000000000..235a897c305 --- /dev/null +++ b/tests/components/iotty/test_switch.py @@ -0,0 +1,300 @@ +"""Unit tests the Hass SWITCH component.""" + +from aiohttp import ClientSession +from freezegun.api import FrozenDateTimeFactory +from iottycloud.verbs import RESULT, STATUS, STATUS_OFF, STATUS_ON +from syrupy.assertion import SnapshotAssertion + +from 
homeassistant.components.iotty.const import DOMAIN +from homeassistant.components.iotty.coordinator import UPDATE_INTERVAL +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import ( + config_entry_oauth2_flow, + device_registry as dr, + entity_registry as er, +) + +from .conftest import test_ls_one_added, test_ls_one_removed + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_turn_on_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled_off, + mock_command_fn, +) -> None: + """Issue a turnon command.""" + + entity_id = "switch.test_light_switch_0_test_serial_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_OFF + + mock_get_status_filled_off.return_value = {RESULT: {STATUS: STATUS_ON}} + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_ON + + +async def test_turn_off_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + mock_command_fn, +) -> None: + """Issue a turnoff command.""" + + entity_id = "switch.test_light_switch_0_test_serial_0" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_ON + + mock_get_status_filled.return_value = {RESULT: {STATUS: STATUS_OFF}} + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + await hass.async_block_till_done() + mock_command_fn.assert_called_once() + + assert (state := hass.states.get(entity_id)) + assert state.state == STATUS_OFF + + +async def test_setup_entry_ok_nodevices( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_status_filled, + snapshot: SnapshotAssertion, + mock_get_devices_nodevices, +) -> None: + """Correctly setup, with no iotty Devices to add to Hass.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert hass.states.async_entity_ids_count() == 0 + assert hass.states.async_entity_ids() == snapshot + + +async def test_devices_creaction_ok( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, +) -> None: + """Test iotty switch creation.""" + + entity_id = "switch.test_light_switch_0_test_serial_0" + + 
mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert (state := hass.states.get(entity_id)) + assert state == snapshot(name="state") + + assert (entry := entity_registry.async_get(entity_id)) + assert entry == snapshot(name="entity") + + assert entry.device_id + assert (device_entry := device_registry.async_get(entry.device_id)) + assert device_entry == snapshot(name="device") + + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot(name="entity-ids") + + +async def test_devices_deletion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch deletion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_twolightswitches.return_value = test_ls_one_removed + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have one device + assert hass.states.async_entity_ids_count() == 1 + assert hass.states.async_entity_ids() == snapshot + + +async def test_devices_insertion_ok( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test iotty switch insertion.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + assert hass.states.async_entity_ids() == snapshot + + mock_get_devices_twolightswitches.return_value = test_ls_one_added + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should have three devices + assert hass.states.async_entity_ids_count() == 3 + assert hass.states.async_entity_ids() == snapshot + + +async def test_api_not_ok_entities_stay_the_same_as_before( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + local_oauth_impl: ClientSession, + mock_get_devices_twolightswitches, + mock_get_status_filled, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test case of incorrect response from iotty API on getting device status.""" + + mock_config_entry.add_to_hass(hass) + + config_entry_oauth2_flow.async_register_implementation( + hass, DOMAIN, local_oauth_impl + ) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + # Should have two devices + assert hass.states.async_entity_ids_count() == 2 + entity_ids = hass.states.async_entity_ids() + assert entity_ids == snapshot + + mock_get_status_filled.return_value = {RESULT: "Not a valid restul"} + + freezer.tick(UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should still 
have two devices
+    assert hass.states.async_entity_ids_count() == 2
+    assert hass.states.async_entity_ids() == entity_ids
+
+
+async def test_api_throws_response_entities_stay_the_same_as_before(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    local_oauth_impl: ClientSession,
+    mock_get_devices_twolightswitches,
+    mock_get_status_filled,
+    snapshot: SnapshotAssertion,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test case where the iotty API raises an error on getting device status."""
+
+    mock_config_entry.add_to_hass(hass)
+
+    config_entry_oauth2_flow.async_register_implementation(
+        hass, DOMAIN, local_oauth_impl
+    )
+
+    assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    # Should have two devices
+    assert hass.states.async_entity_ids_count() == 2
+    entity_ids = hass.states.async_entity_ids()
+    assert entity_ids == snapshot
+
+    mock_get_devices_twolightswitches.return_value = test_ls_one_added
+    mock_get_status_filled.side_effect = Exception("Something went wrong")
+
+    freezer.tick(UPDATE_INTERVAL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    # Should still have two devices
+    assert hass.states.async_entity_ids_count() == 2
+    assert hass.states.async_entity_ids() == entity_ids
diff --git a/tests/components/ipma/__init__.py b/tests/components/ipma/__init__.py
index 799120e3966..ab5998c922f 100644
--- a/tests/components/ipma/__init__.py
+++ b/tests/components/ipma/__init__.py
@@ -108,6 +108,7 @@ class MockLocation:
                 location=Forecast_Location(0, "", 0, 0, 0, "", (0, 0)),
             ),
         ]
+        raise ValueError(f"Unknown forecast period: {period}")
     name = "HomeTown"
     station = "HomeTown Station"
diff --git a/tests/components/ipma/conftest.py b/tests/components/ipma/conftest.py
index 7f3e82a8819..8f2a017dcb8 100644
--- a/tests/components/ipma/conftest.py
+++ b/tests/components/ipma/conftest.py
@@ -14,7 +14,7 @@ from tests.common import MockConfigEntry
 @pytest.fixture
-def config_entry(hass):
+def config_entry(hass: HomeAssistant) -> MockConfigEntry:
     """Define a config entry fixture."""
     entry = MockConfigEntry(
         domain=DOMAIN,
diff --git a/tests/components/ipma/test_config_flow.py b/tests/components/ipma/test_config_flow.py
index 38bb1dbf126..2a4c3517b2a 100644
--- a/tests/components/ipma/test_config_flow.py
+++ b/tests/components/ipma/test_config_flow.py
@@ -1,10 +1,10 @@
 """Tests for IPMA config flow."""
+from collections.abc import Generator
 from unittest.mock import patch
 from pyipma import IPMAException
 import pytest
-from typing_extensions import Generator
 from homeassistant.components.ipma.const import DOMAIN
 from homeassistant.config_entries import SOURCE_USER
@@ -14,6 +14,8 @@ from homeassistant.data_entry_flow import FlowResultType
 from . import MockLocation
+from tests.common import MockConfigEntry
+
 @pytest.fixture(name="ipma_setup", autouse=True)
 def ipma_setup_fixture() -> Generator[None]:
@@ -93,7 +95,9 @@ async def test_config_flow_failures(hass: HomeAssistant) -> None:
     }
-async def test_flow_entry_already_exists(hass: HomeAssistant, init_integration) -> None:
+async def test_flow_entry_already_exists(
+    hass: HomeAssistant, init_integration: MockConfigEntry
+) -> None:
     """Test user input for config_entry that already exists.
Test when the form should show when user puts existing location diff --git a/tests/components/ipma/test_diagnostics.py b/tests/components/ipma/test_diagnostics.py index b7d421a2ee5..26e54454947 100644 --- a/tests/components/ipma/test_diagnostics.py +++ b/tests/components/ipma/test_diagnostics.py @@ -4,6 +4,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -11,7 +12,7 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - init_integration, + init_integration: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" diff --git a/tests/components/ipma/test_weather.py b/tests/components/ipma/test_weather.py index b7ef1347ca5..997eb582083 100644 --- a/tests/components/ipma/test_weather.py +++ b/tests/components/ipma/test_weather.py @@ -4,6 +4,7 @@ import datetime from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory +from pyipma.observation import Observation import pytest from syrupy.assertion import SnapshotAssertion @@ -43,7 +44,7 @@ TEST_CONFIG_HOURLY = { class MockBadLocation(MockLocation): """Mock Location with unresponsive api.""" - async def observation(self, api): + async def observation(self, api) -> Observation | None: """Mock Observation.""" return None diff --git a/tests/components/ipp/conftest.py b/tests/components/ipp/conftest.py index 5e39a16f3b1..9a47cc3c355 100644 --- a/tests/components/ipp/conftest.py +++ b/tests/components/ipp/conftest.py @@ -1,11 +1,11 @@ """Fixtures for IPP integration tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from pyipp import Printer import pytest -from typing_extensions import Generator from homeassistant.components.ipp.const import CONF_BASE_PATH, DOMAIN from homeassistant.const import ( diff --git a/tests/components/iqvia/conftest.py b/tests/components/iqvia/conftest.py index 6fb14ca4d28..0d23b825c5a 100644 --- a/tests/components/iqvia/conftest.py +++ b/tests/components/iqvia/conftest.py @@ -1,18 +1,23 @@ """Define test fixtures for IQVIA.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import patch import pytest from homeassistant.components.iqvia.const import CONF_ZIP_CODE, DOMAIN +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -25,7 +30,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_ZIP_CODE: "12345", @@ -33,59 +38,59 @@ def config_fixture(hass): @pytest.fixture(name="data_allergy_forecast", scope="package") -def data_allergy_forecast_fixture(): +def data_allergy_forecast_fixture() -> JsonObjectType: """Define allergy forecast data.""" - return 
json.loads(load_fixture("allergy_forecast_data.json", "iqvia")) + return load_json_object_fixture("allergy_forecast_data.json", "iqvia") @pytest.fixture(name="data_allergy_index", scope="package") -def data_allergy_index_fixture(): +def data_allergy_index_fixture() -> JsonObjectType: """Define allergy index data.""" - return json.loads(load_fixture("allergy_index_data.json", "iqvia")) + return load_json_object_fixture("allergy_index_data.json", "iqvia") @pytest.fixture(name="data_allergy_outlook", scope="package") -def data_allergy_outlook_fixture(): +def data_allergy_outlook_fixture() -> JsonObjectType: """Define allergy outlook data.""" - return json.loads(load_fixture("allergy_outlook_data.json", "iqvia")) + return load_json_object_fixture("allergy_outlook_data.json", "iqvia") @pytest.fixture(name="data_asthma_forecast", scope="package") -def data_asthma_forecast_fixture(): +def data_asthma_forecast_fixture() -> JsonObjectType: """Define asthma forecast data.""" - return json.loads(load_fixture("asthma_forecast_data.json", "iqvia")) + return load_json_object_fixture("asthma_forecast_data.json", "iqvia") @pytest.fixture(name="data_asthma_index", scope="package") -def data_asthma_index_fixture(): +def data_asthma_index_fixture() -> JsonObjectType: """Define asthma index data.""" - return json.loads(load_fixture("asthma_index_data.json", "iqvia")) + return load_json_object_fixture("asthma_index_data.json", "iqvia") @pytest.fixture(name="data_disease_forecast", scope="package") -def data_disease_forecast_fixture(): +def data_disease_forecast_fixture() -> JsonObjectType: """Define disease forecast data.""" - return json.loads(load_fixture("disease_forecast_data.json", "iqvia")) + return load_json_object_fixture("disease_forecast_data.json", "iqvia") @pytest.fixture(name="data_disease_index", scope="package") -def data_disease_index_fixture(): +def data_disease_index_fixture() -> JsonObjectType: """Define disease index data.""" - return json.loads(load_fixture("disease_index_data.json", "iqvia")) + return load_json_object_fixture("disease_index_data.json", "iqvia") @pytest.fixture(name="setup_iqvia") async def setup_iqvia_fixture( - hass, - config, - data_allergy_forecast, - data_allergy_index, - data_allergy_outlook, - data_asthma_forecast, - data_asthma_index, - data_disease_forecast, - data_disease_index, -): + hass: HomeAssistant, + config: dict[str, Any], + data_allergy_forecast: JsonObjectType, + data_allergy_index: JsonObjectType, + data_allergy_outlook: JsonObjectType, + data_asthma_forecast: JsonObjectType, + data_asthma_index: JsonObjectType, + data_disease_forecast: JsonObjectType, + data_disease_index: JsonObjectType, +) -> AsyncGenerator[None]: """Define a fixture to set up IQVIA.""" with ( patch( diff --git a/tests/components/iqvia/test_config_flow.py b/tests/components/iqvia/test_config_flow.py index 17c977a6b4c..22f473a3fb5 100644 --- a/tests/components/iqvia/test_config_flow.py +++ b/tests/components/iqvia/test_config_flow.py @@ -1,12 +1,17 @@ """Define tests for the IQVIA config flow.""" +from typing import Any + +import pytest + from homeassistant.components.iqvia import CONF_ZIP_CODE, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> None: +@pytest.mark.usefixtures("config_entry") +async def test_duplicate_error(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that errors 
are shown when duplicates are added.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config @@ -33,7 +38,8 @@ async def test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -async def test_step_user(hass: HomeAssistant, config, setup_iqvia) -> None: +@pytest.mark.usefixtures("setup_iqvia") +async def test_step_user(hass: HomeAssistant, config: dict[str, Any]) -> None: """Test that the user step works (without MFA).""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=config diff --git a/tests/components/iqvia/test_diagnostics.py b/tests/components/iqvia/test_diagnostics.py index 7c445c9b3e4..9d5639c311c 100644 --- a/tests/components/iqvia/test_diagnostics.py +++ b/tests/components/iqvia/test_diagnostics.py @@ -1,23 +1,24 @@ """Test IQVIA diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_entry_diagnostics( hass: HomeAssistant, - config_entry, + config_entry: MockConfigEntry, hass_client: ClientSessionGenerator, - setup_iqvia, + setup_iqvia: None, # Needs to be injected after config_entry snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/iron_os/__init__.py b/tests/components/iron_os/__init__.py new file mode 100644 index 00000000000..4e27f2c741c --- /dev/null +++ b/tests/components/iron_os/__init__.py @@ -0,0 +1 @@ +"""Tests for the Pinecil integration.""" diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py new file mode 100644 index 00000000000..b6983074441 --- /dev/null +++ b/tests/components/iron_os/conftest.py @@ -0,0 +1,141 @@ +"""Fixtures for Pinecil tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from bleak.backends.device import BLEDevice +from habluetooth import BluetoothServiceInfoBleak +from pynecil import DeviceInfoResponse, LiveDataResponse, OperatingMode, PowerSource +import pytest + +from homeassistant.components.iron_os import DOMAIN +from homeassistant.const import CONF_ADDRESS + +from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device + +USER_INPUT = {CONF_ADDRESS: "c0:ff:ee:c0:ff:ee"} +DEFAULT_NAME = "Pinecil-C0FFEEE" +PINECIL_SERVICE_INFO = BluetoothServiceInfoBleak( + name="Pinecil-C0FFEEE", + address="c0:ff:ee:c0:ff:ee", + device=generate_ble_device( + address="c0:ff:ee:c0:ff:ee", + name="Pinecil-C0FFEEE", + ), + rssi=-61, + manufacturer_data={}, + service_data={}, + service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={}, + service_uuids=["9eae1000-9d0d-48c5-aa55-33e27f9bc533"], + ), + connectable=True, + time=0, + tx_power=None, +) + +UNKNOWN_SERVICE_INFO = BluetoothServiceInfoBleak( + name="", + address="c0:ff:ee:c0:ff:ee", + device=generate_ble_device( + address="c0:ff:ee:c0:ff:ee", + name="", + ), + rssi=-61, + manufacturer_data={}, 
+ service_data={}, + service_uuids=[], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={}, + service_uuids=[], + ), + connectable=True, + time=0, + tx_power=None, +) + + +@pytest.fixture(autouse=True) +def mock_bluetooth(enable_bluetooth: None) -> None: + """Auto mock bluetooth.""" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.iron_os.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="discovery") +def mock_async_discovered_service_info() -> Generator[MagicMock]: + """Mock service discovery.""" + with patch( + "homeassistant.components.iron_os.config_flow.async_discovered_service_info", + return_value=[PINECIL_SERVICE_INFO, UNKNOWN_SERVICE_INFO], + ) as discovery: + yield discovery + + +@pytest.fixture(name="config_entry") +def mock_config_entry() -> MockConfigEntry: + """Mock Pinecil configuration entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + data={}, + unique_id="c0:ff:ee:c0:ff:ee", + entry_id="1234567890", + ) + + +@pytest.fixture(name="ble_device") +def mock_ble_device() -> Generator[MagicMock]: + """Mock BLEDevice.""" + with patch( + "homeassistant.components.bluetooth.async_ble_device_from_address", + return_value=BLEDevice( + address="c0:ff:ee:c0:ff:ee", name=DEFAULT_NAME, rssi=-50, details={} + ), + ) as ble_device: + yield ble_device + + +@pytest.fixture +def mock_pynecil() -> Generator[AsyncMock, None, None]: + """Mock Pynecil library.""" + with patch( + "homeassistant.components.iron_os.Pynecil", autospec=True + ) as mock_client: + client = mock_client.return_value + + client.get_device_info.return_value = DeviceInfoResponse( + build="v2.22", + device_id="c0ffeeC0", + address="c0:ff:ee:c0:ff:ee", + device_sn="0000c0ffeec0ffee", + name=DEFAULT_NAME, + ) + client.get_live_data.return_value = LiveDataResponse( + live_temp=298, + setpoint_temp=300, + dc_voltage=20.6, + handle_temp=36.3, + pwm_level=41, + power_src=PowerSource.PD, + tip_resistance=6.2, + uptime=1671, + movement_time=10000, + max_tip_temp_ability=460, + tip_voltage=2212, + hall_sensor=0, + operating_mode=OperatingMode.SOLDERING, + estimated_power=24.8, + ) + yield client diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr new file mode 100644 index 00000000000..2f5ee62e37e --- /dev/null +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_state[number.pinecil_setpoint_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.pinecil_setpoint_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Setpoint temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_setpoint_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_setpoint_temperature-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Setpoint temperature', + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_setpoint_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '300', + }) +# --- diff --git a/tests/components/iron_os/snapshots/test_sensor.ambr b/tests/components/iron_os/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..64cb951dacc --- /dev/null +++ b/tests/components/iron_os/snapshots/test_sensor.ambr @@ -0,0 +1,683 @@ +# serializer version: 1 +# name: test_sensors[sensor.pinecil_dc_input_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_dc_input_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC input voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_dc_input_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil DC input voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_dc_input_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.6', + }) +# --- +# name: test_sensors[sensor.pinecil_estimated_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_estimated_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Estimated power', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_estimated_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_estimated_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Pinecil Estimated power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_estimated_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24.8', + }) +# --- +# name: test_sensors[sensor.pinecil_hall_effect_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_hall_effect_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, 
+ 'original_icon': None, + 'original_name': 'Hall effect strength', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_hall_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_hall_effect_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Hall effect strength', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_hall_effect_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.pinecil_handle_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_handle_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Handle temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_handle_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_handle_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Handle temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_handle_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36.3', + }) +# --- +# name: test_sensors[sensor.pinecil_last_movement_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_last_movement_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last movement time', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_movement_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_last_movement_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Last movement time', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_last_movement_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10000', + }) +# --- +# name: test_sensors[sensor.pinecil_max_tip_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_max_tip_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Max tip temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_max_tip_temp_ability', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_max_tip_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Max tip temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_max_tip_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '460', + }) +# --- +# name: test_sensors[sensor.pinecil_operating_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'idle', + 'soldering', + 'boost', + 'sleeping', + 'settings', + 'debug', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pinecil_operating_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Operating mode', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_operating_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_operating_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Pinecil Operating mode', + 'options': list([ + 'idle', + 'soldering', + 'boost', + 'sleeping', + 'settings', + 'debug', + ]), + }), + 'context': , + 'entity_id': 'sensor.pinecil_operating_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'soldering', + }) +# --- +# name: test_sensors[sensor.pinecil_power_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_power_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power level', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_power_pwm_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.pinecil_power_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Pinecil Power level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.pinecil_power_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '41', + }) +# --- +# name: test_sensors[sensor.pinecil_power_source-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'dc', + 'qc', + 'pd_vbus', + 'pd', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.pinecil_power_source', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power source', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_power_source', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.pinecil_power_source-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Pinecil Power source', + 'options': list([ + 'dc', + 'qc', + 'pd_vbus', + 'pd', + ]), + }), + 'context': , + 'entity_id': 'sensor.pinecil_power_source', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pd', + }) +# --- +# name: test_sensors[sensor.pinecil_raw_tip_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_raw_tip_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Raw tip voltage', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_raw_tip_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Pinecil Raw tip voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_raw_tip_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2212', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_resistance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_tip_resistance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tip resistance', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_tip_resistance', + 'unit_of_measurement': 'Ω', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_resistance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Pinecil Tip resistance', + 'unit_of_measurement': 'Ω', + }), + 'context': , + 'entity_id': 'sensor.pinecil_tip_resistance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.2', + }) +# --- +# name: test_sensors[sensor.pinecil_tip_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.pinecil_tip_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tip temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_live_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_tip_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Tip temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_tip_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '298', + }) +# --- +# name: test_sensors[sensor.pinecil_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.pinecil_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_uptime', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.pinecil_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Pinecil Uptime', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.pinecil_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1671', + }) +# --- diff --git a/tests/components/iron_os/test_config_flow.py b/tests/components/iron_os/test_config_flow.py new file mode 100644 index 00000000000..231ec6cc3d6 --- /dev/null +++ b/tests/components/iron_os/test_config_flow.py @@ -0,0 +1,66 @@ +"""Tests for the Pinecil config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, MagicMock + +from homeassistant.components.iron_os import DOMAIN +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import DEFAULT_NAME, PINECIL_SERVICE_INFO, USER_INPUT + + +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, discovery: MagicMock +) -> None: + """Test the user config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == {} + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_no_device_discovered( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + discovery: MagicMock, +) -> None: + """Test setup with no device discoveries.""" + discovery.return_value = [] + result = await hass.config_entries.flow.async_init( + DOMAIN, + 
context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + + +async def test_async_step_bluetooth(hass: HomeAssistant) -> None: + """Test discovery via bluetooth..""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_BLUETOOTH}, + data=PINECIL_SERVICE_INFO, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "bluetooth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == {} + assert result["result"].unique_id == "c0:ff:ee:c0:ff:ee" diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py new file mode 100644 index 00000000000..fb0a782ea36 --- /dev/null +++ b/tests/components/iron_os/test_init.py @@ -0,0 +1,26 @@ +"""Test init of IronOS integration.""" + +from unittest.mock import AsyncMock + +from pynecil import CommunicationError +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("ble_device") +async def test_setup_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_device_info.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py new file mode 100644 index 00000000000..c091040668c --- /dev/null +++ b/tests/components/iron_os/test_number.py @@ -0,0 +1,104 @@ +"""Tests for the IronOS number platform.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +from pynecil import CharSetting, CommunicationError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def sensor_only() -> AsyncGenerator[None, None]: + """Enable only the number platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.NUMBER], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test the IronOS number platform states.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + 
+@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) + assert len(mock_pynecil.write.mock_calls) == 1 + mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service with exception.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.write.side_effect = CommunicationError + + with pytest.raises( + ServiceValidationError, + match="Failed to submit setting to device, try again later", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py new file mode 100644 index 00000000000..0c35193e400 --- /dev/null +++ b/tests/components/iron_os/test_sensor.py @@ -0,0 +1,73 @@ +"""Tests for the Pinecil Sensors.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory +from pynecil import CommunicationError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.iron_os.coordinator import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +@pytest.fixture(autouse=True) +async def sensor_only() -> AsyncGenerator[None, None]: + """Enable only the sensor platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.SENSOR], + ): + yield + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_pynecil: AsyncMock, + ble_device: MagicMock, +) -> None: + """Test the Pinecil sensor platform.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors_unavailable( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + 
mock_pynecil: AsyncMock, + ble_device: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the sensors when device disconnects.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.get_live_data.side_effect = CommunicationError + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + for entity_entry in entity_entries: + assert hass.states.get(entity_entry.entity_id).state == STATE_UNAVAILABLE diff --git a/tests/components/islamic_prayer_times/conftest.py b/tests/components/islamic_prayer_times/conftest.py index ae9b1f45eb9..ae0b6741fdf 100644 --- a/tests/components/islamic_prayer_times/conftest.py +++ b/tests/components/islamic_prayer_times/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the islamic_prayer_times tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/israel_rail/__init__.py b/tests/components/israel_rail/__init__.py new file mode 100644 index 00000000000..23cf9f5a821 --- /dev/null +++ b/tests/components/israel_rail/__init__.py @@ -0,0 +1,28 @@ +"""Tests for the israel_rail component.""" + +from datetime import timedelta + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.israel_rail.const import DEFAULT_SCAN_INTERVAL +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def init_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Set up the israel rail integration in Home Assistant.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +async def goto_future(hass: HomeAssistant, freezer: FrozenDateTimeFactory): + """Move to future.""" + freezer.tick(DEFAULT_SCAN_INTERVAL + timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() diff --git a/tests/components/israel_rail/conftest.py b/tests/components/israel_rail/conftest.py new file mode 100644 index 00000000000..07a101d40c7 --- /dev/null +++ b/tests/components/israel_rail/conftest.py @@ -0,0 +1,137 @@ +"""Configuration for Israel rail tests.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, patch +from zoneinfo import ZoneInfo + +from israelrailapi.api import TrainRoute +import pytest + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN + +from tests.common import MockConfigEntry + +VALID_CONFIG = { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", +} + +SOURCE_DEST = "באר יעקב אשקלון" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.israel_rail.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=VALID_CONFIG, + unique_id=SOURCE_DEST, + ) + + +@pytest.fixture +def mock_israelrail() -> AsyncMock: + """Build a fixture for the Israel rail API.""" + with ( + 
patch( + "homeassistant.components.israel_rail.TrainSchedule", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.israel_rail.config_flow.TrainSchedule", + new=mock_client, + ), + ): + client = mock_client.return_value + client.query.return_value = TRAINS + + yield client + + +def get_time(hour: int, minute: int) -> str: + """Return a time in isoformat.""" + return datetime(2021, 10, 10, hour, minute, 10, tzinfo=ZoneInfo("UTC")).isoformat() + + +def get_train_route( + train_number: str = "1234", + departure_time: str = "2021-10-10T10:10:10", + arrival_time: str = "2021-10-10T10:10:10", + origin_platform: str = "1", + dest_platform: str = "2", + origin_station: str = "3500", + destination_station: str = "3700", +) -> TrainRoute: + """Build a TrainRoute of the israelrail API.""" + return TrainRoute( + [ + { + "orignStation": origin_station, + "destinationStation": destination_station, + "departureTime": departure_time, + "arrivalTime": arrival_time, + "originPlatform": origin_platform, + "destPlatform": dest_platform, + "trainNumber": train_number, + } + ] + ) + + +TRAINS = [ + get_train_route( + train_number="1234", + departure_time=get_time(10, 10), + arrival_time=get_time(10, 30), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1235", + departure_time=get_time(10, 20), + arrival_time=get_time(10, 40), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1236", + departure_time=get_time(10, 30), + arrival_time=get_time(10, 50), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1237", + departure_time=get_time(10, 40), + arrival_time=get_time(11, 00), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), + get_train_route( + train_number="1238", + departure_time=get_time(10, 50), + arrival_time=get_time(11, 10), + origin_platform="1", + dest_platform="2", + origin_station="3500", + destination_station="3700", + ), +] diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..9806ecb1fae --- /dev/null +++ b/tests/components/israel_rail/snapshots/test_sensor.ambr @@ -0,0 +1,571 @@ +# serializer version: 1 +# name: test_valid_config[sensor.mock_title_departure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure0', + 'unique_id': 'באר יעקב אשקלון_departure', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure', + }), + 'context': , + 'entity_id': 
'sensor.mock_title_departure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:10:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +1', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure1', + 'unique_id': 'באר יעקב אשקלון_departure1', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +1', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:20:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_departure_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Departure +2', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure2', + 'unique_id': 'באר יעקב אשקלון_departure2', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_departure_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Departure +2', + }), + 'context': , + 'entity_id': 'sensor.mock_title_departure_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:30:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'platform', + 'unique_id': 'באר יעקב אשקלון_platform', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 
'sensor.mock_title_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_none_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_none_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'trains', + 'unique_id': 'באר יעקב אשקלון_trains', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 'sensor.mock_title_none_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_none_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_none_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'train_number', + 'unique_id': 'באר יעקב אשקלון_train_number', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_none_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title None', + }), + 'context': , + 'entity_id': 'sensor.mock_title_none_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234', + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_platform', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Platform', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'platform', + 'unique_id': 'באר יעקב אשקלון_platform', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_platform-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Platform', + }), + 'context': , + 'entity_id': 'sensor.mock_title_platform', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure0', + 'unique_id': 'באר יעקב אשקלון_departure', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:10:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure1', + 'unique_id': 'באר יעקב אשקלון_departure1', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:20:10+00:00', + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_timestamp_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'departure2', + 'unique_id': 'באר יעקב אשקלון_departure2', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_timestamp_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-10-10T10:30:10+00:00', + }) +# --- +# name: 
test_valid_config[sensor.mock_title_train_number-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_train_number', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Train number', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'train_number', + 'unique_id': 'באר יעקב אשקלון_train_number', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_train_number-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Train number', + }), + 'context': , + 'entity_id': 'sensor.mock_title_train_number', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234', + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_trains', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Trains', + 'platform': 'israel_rail', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'trains', + 'unique_id': 'באר יעקב אשקלון_trains', + 'unit_of_measurement': None, + }) +# --- +# name: test_valid_config[sensor.mock_title_trains-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Israel rail.', + 'friendly_name': 'Mock Title Trains', + }), + 'context': , + 'entity_id': 'sensor.mock_title_trains', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- diff --git a/tests/components/israel_rail/test_config_flow.py b/tests/components/israel_rail/test_config_flow.py new file mode 100644 index 00000000000..a27d9b3420b --- /dev/null +++ b/tests/components/israel_rail/test_config_flow.py @@ -0,0 +1,87 @@ +"""Define tests for the israel rail config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.israel_rail import CONF_DESTINATION, CONF_START, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import VALID_CONFIG + +from tests.common import MockConfigEntry + + +async def test_create_entry( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_israelrail: AsyncMock +) -> None: + """Test that the user step works.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + 
CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_fails( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails.""" + mock_israelrail.query.side_effect = Exception("error") + failed_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data=VALID_CONFIG, + ) + + assert failed_result["errors"] == {"base": "unknown"} + assert failed_result["type"] is FlowResultType.FORM + + mock_israelrail.query.side_effect = None + + result = await hass.config_entries.flow.async_configure( + failed_result["flow_id"], + VALID_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "באר יעקב אשקלון" + assert result["data"] == { + CONF_START: "באר יעקב", + CONF_DESTINATION: "אשקלון", + } + + +async def test_flow_already_configured( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test that the user step fails when the entry is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result_aborted = await hass.config_entries.flow.async_configure( + result["flow_id"], + VALID_CONFIG, + ) + + assert result_aborted["type"] is FlowResultType.ABORT + assert result_aborted["reason"] == "already_configured" diff --git a/tests/components/israel_rail/test_init.py b/tests/components/israel_rail/test_init.py new file mode 100644 index 00000000000..c4dd4e5721e --- /dev/null +++ b/tests/components/israel_rail/test_init.py @@ -0,0 +1,22 @@ +"""Test init of israel_rail integration.""" + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.common import MockConfigEntry + + +async def test_invalid_config( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_israelrail: AsyncMock, +) -> None: + """Ensure nothing is created when config is wrong.""" + mock_israelrail.query.side_effect = Exception("error") + await init_integration(hass, mock_config_entry) + assert not hass.states.async_entity_ids("sensor") + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/israel_rail/test_sensor.py b/tests/components/israel_rail/test_sensor.py new file mode 100644 index 00000000000..d044dfe1d7c --- /dev/null +++ b/tests/components/israel_rail/test_sensor.py @@ -0,0 +1,69 @@ +"""Tests for the israel_rail sensor.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import goto_future, init_integration +from .conftest import TRAINS, get_time + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_valid_config( + hass: HomeAssistant, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Ensure everything starts correctly.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_train( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the train data is updated.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 10) + assert departure_sensor.state == expected_time + + mock_israelrail.query.return_value = TRAINS[1:] + + await goto_future(hass, freezer) + + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + expected_time = get_time(10, 20) + assert departure_sensor.state == expected_time + + +async def test_fail_query( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_israelrail: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Ensure the integration handles query failures.""" + await init_integration(hass, mock_config_entry) + assert len(hass.states.async_entity_ids()) == 6 + mock_israelrail.query.side_effect = Exception("error") + await goto_future(hass, freezer) + assert len(hass.states.async_entity_ids()) == 6 + departure_sensor = hass.states.get("sensor.mock_title_departure") + assert departure_sensor.state == STATE_UNAVAILABLE diff --git a/tests/components/ista_ecotrend/conftest.py b/tests/components/ista_ecotrend/conftest.py index 2218ef05ba7..cbbc166031d 100644 --- a/tests/components/ista_ecotrend/conftest.py +++ b/tests/components/ista_ecotrend/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the ista EcoTrend tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ista_ecotrend.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/ista_ecotrend/snapshots/test_init.ambr b/tests/components/ista_ecotrend/snapshots/test_init.ambr index c5dec7d9d56..c84d55c059c 100644 --- a/tests/components/ista_ecotrend/snapshots/test_init.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', + 'model_id': None, 'name': 'Luxemburger Str. 1', 'name_by_user': None, 'primary_config_entry': , @@ -52,6 +53,7 @@ }), 'manufacturer': 'ista SE', 'model': 'ista EcoTrend', + 'model_id': None, 'name': 'Bahnhofsstr. 
1A', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr index c312f9b6350..f9ab7a54b63 100644 --- a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr @@ -64,7 +64,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -92,13 +94,15 @@ 'supported_features': 0, 'translation_key': , 'unique_id': 'eaf5c5c8-889f-4a3c-b68c-e9a676505762_heating', - 'unit_of_measurement': None, + 'unit_of_measurement': 'units', }) # --- # name: test_setup[sensor.bahnhofsstr_1a_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Bahnhofsstr. 1A Heating', + 'state_class': , + 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.bahnhofsstr_1a_heating', @@ -491,7 +495,9 @@ 'aliases': set({ }), 'area_id': None, - 'capabilities': None, + 'capabilities': dict({ + 'state_class': , + }), 'config_entry_id': , 'device_class': None, 'device_id': , @@ -519,13 +525,15 @@ 'supported_features': 0, 'translation_key': , 'unique_id': '26e93f1a-c828-11ea-87d0-0242ac130003_heating', - 'unit_of_measurement': None, + 'unit_of_measurement': 'units', }) # --- # name: test_setup[sensor.luxemburger_str_1_heating-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Luxemburger Str. 1 Heating', + 'state_class': , + 'unit_of_measurement': 'units', }), 'context': , 'entity_id': 'sensor.luxemburger_str_1_heating', diff --git a/tests/components/jellyfin/conftest.py b/tests/components/jellyfin/conftest.py index 40d03212ceb..c3732714177 100644 --- a/tests/components/jellyfin/conftest.py +++ b/tests/components/jellyfin/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from jellyfin_apiclient_python import JellyfinClient @@ -9,7 +10,6 @@ from jellyfin_apiclient_python.api import API from jellyfin_apiclient_python.configuration import Config from jellyfin_apiclient_python.connection_manager import ConnectionManager import pytest -from typing_extensions import Generator from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME diff --git a/tests/components/jellyfin/test_init.py b/tests/components/jellyfin/test_init.py index 51d7af2ae94..1af59737296 100644 --- a/tests/components/jellyfin/test_init.py +++ b/tests/components/jellyfin/test_init.py @@ -68,12 +68,10 @@ async def test_load_unload_config_entry( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.LOADED await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/jewish_calendar/conftest.py b/tests/components/jewish_calendar/conftest.py index 5e16289f473..97909291f27 100644 --- a/tests/components/jewish_calendar/conftest.py +++ b/tests/components/jewish_calendar/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the jewish_calendar tests.""" +from collections.abc import Generator 
from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.jewish_calendar.const import DEFAULT_NAME, DOMAIN diff --git a/tests/components/jewish_calendar/test_config_flow.py b/tests/components/jewish_calendar/test_config_flow.py index 3189571a5a7..466d3a1e4f0 100644 --- a/tests/components/jewish_calendar/test_config_flow.py +++ b/tests/components/jewish_calendar/test_config_flow.py @@ -9,6 +9,7 @@ from homeassistant.components.jewish_calendar.const import ( CONF_CANDLE_LIGHT_MINUTES, CONF_DIASPORA, CONF_HAVDALAH_OFFSET_MINUTES, + DEFAULT_CANDLE_LIGHT, DEFAULT_DIASPORA, DEFAULT_LANGUAGE, DOMAIN, @@ -138,3 +139,28 @@ async def test_options(hass: HomeAssistant, mock_config_entry: MockConfigEntry) assert len(entries) == 1 assert entries[0].options[CONF_CANDLE_LIGHT_MINUTES] == 25 assert entries[0].options[CONF_HAVDALAH_OFFSET_MINUTES] == 34 + + +async def test_options_reconfigure( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that updating the options of the Jewish Calendar integration triggers a value update.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert CONF_CANDLE_LIGHT_MINUTES not in mock_config_entry.options + + # Update the CONF_CANDLE_LIGHT_MINUTES option to a new value + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CANDLE_LIGHT_MINUTES: DEFAULT_CANDLE_LIGHT + 1, + }, + ) + assert result["result"] + + # The value of the "upcoming_shabbat_candle_lighting" sensor should be the new value + assert ( + mock_config_entry.options[CONF_CANDLE_LIGHT_MINUTES] == DEFAULT_CANDLE_LIGHT + 1 + ) diff --git a/tests/components/jvc_projector/conftest.py b/tests/components/jvc_projector/conftest.py index dd012d3f355..3115cbfe252 100644 --- a/tests/components/jvc_projector/conftest.py +++ b/tests/components/jvc_projector/conftest.py @@ -1,9 +1,9 @@ """Fixtures for JVC Projector integration.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.jvc_projector.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT diff --git a/tests/components/kaleidescape/conftest.py b/tests/components/kaleidescape/conftest.py index 5cd2a8ebb18..e5aeedc3895 100644 --- a/tests/components/kaleidescape/conftest.py +++ b/tests/components/kaleidescape/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Kaleidescape integration.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from kaleidescape import Dispatcher from kaleidescape.device import Automation, Movie, Power, System import pytest -from typing_extensions import Generator from homeassistant.components.kaleidescape.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/kira/test_init.py b/tests/components/kira/test_init.py index e57519667ce..8e6c70c83a4 100644 --- a/tests/components/kira/test_init.py +++ b/tests/components/kira/test_init.py @@ -1,6 +1,7 @@ """The tests for Kira.""" import os +from pathlib import Path import shutil import tempfile from unittest.mock import patch @@ -76,10 +77,9 @@ async def test_kira_creates_codes(work_dir) -> None: assert os.path.exists(code_path), "Kira component didn't create 
codes file" -async def test_load_codes(work_dir) -> None: +async def test_load_codes(hass: HomeAssistant, work_dir) -> None: """Kira should ignore invalid codes.""" code_path = os.path.join(work_dir, "codes.yaml") - with open(code_path, "w", encoding="utf8") as code_file: - code_file.write(KIRA_CODES) + await hass.async_add_executor_job(Path(code_path).write_text, KIRA_CODES) res = kira.load_codes(code_path) assert len(res) == 1, "Expected exactly 1 valid Kira code" diff --git a/tests/components/kitchen_sink/snapshots/test_switch.ambr b/tests/components/kitchen_sink/snapshots/test_switch.ambr index 277b4888e05..fe4311ad711 100644 --- a/tests/components/kitchen_sink/snapshots/test_switch.ambr +++ b/tests/components/kitchen_sink/snapshots/test_switch.ambr @@ -67,6 +67,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Outlet 1', 'name_by_user': None, 'primary_config_entry': , @@ -98,6 +99,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Power strip with 2 sockets', 'name_by_user': None, 'primary_config_entry': , @@ -175,6 +177,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Outlet 2', 'name_by_user': None, 'primary_config_entry': , @@ -206,6 +209,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Power strip with 2 sockets', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/kitchen_sink/test_config_flow.py b/tests/components/kitchen_sink/test_config_flow.py index 290167196cd..5f163d1342e 100644 --- a/tests/components/kitchen_sink/test_config_flow.py +++ b/tests/components/kitchen_sink/test_config_flow.py @@ -1,6 +1,6 @@ """Test the Everything but the Kitchen Sink config flow.""" -from collections.abc import AsyncGenerator +from collections.abc import Generator from unittest.mock import patch import pytest @@ -15,7 +15,7 @@ from tests.common import MockConfigEntry @pytest.fixture -async def no_platforms() -> AsyncGenerator[None, None]: +def no_platforms() -> Generator[None]: """Don't enable any platforms.""" with patch( "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 1547a10bd2b..0575141bb3b 100644 --- a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import ANY import pytest from homeassistant.components.kitchen_sink import DOMAIN -from homeassistant.components.recorder import Recorder, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.statistics import ( async_add_external_statistics, get_last_statistics, @@ -24,14 +24,13 @@ from tests.typing import ClientSessionGenerator, WebSocketGenerator @pytest.fixture -def mock_history(hass): +def mock_history(hass: HomeAssistant) -> None: """Mock history component loaded.""" hass.config.components.add("history") -async def test_demo_statistics( - recorder_mock: Recorder, mock_history, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock", "mock_history") +async def test_demo_statistics(hass: HomeAssistant) -> None: """Test that the kitchen sink component makes some statistics available.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -63,9 +62,8 @@ async def test_demo_statistics( } in statistic_ids -async def test_demo_statistics_growth( - recorder_mock: Recorder, mock_history, hass: 
HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock", "mock_history") +async def test_demo_statistics_growth(hass: HomeAssistant) -> None: """Test that the kitchen sink sum statistics adds to the previous state.""" hass.config.units = US_CUSTOMARY_SYSTEM @@ -104,8 +102,8 @@ async def test_demo_statistics_growth( @pytest.mark.freeze_time("2023-10-21") +@pytest.mark.usefixtures("mock_history") async def test_issues_created( - mock_history, hass: HomeAssistant, hass_client: ClientSessionGenerator, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/kitchen_sink/test_notify.py b/tests/components/kitchen_sink/test_notify.py index df025087b6b..12e19ffaa49 100644 --- a/tests/components/kitchen_sink/test_notify.py +++ b/tests/components/kitchen_sink/test_notify.py @@ -1,10 +1,10 @@ """The tests for the demo button component.""" +from collections.abc import AsyncGenerator from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.notify import ( diff --git a/tests/components/kitchen_sink/test_switch.py b/tests/components/kitchen_sink/test_switch.py index c744ba2be44..d006908e264 100644 --- a/tests/components/kitchen_sink/test_switch.py +++ b/tests/components/kitchen_sink/test_switch.py @@ -1,5 +1,6 @@ """The tests for the demo switch component.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -20,7 +21,7 @@ SWITCH_ENTITY_IDS = ["switch.outlet_1", "switch.outlet_2"] @pytest.fixture -async def switch_only() -> None: +def switch_only() -> Generator[None]: """Enable only the switch platform.""" with patch( "homeassistant.components.kitchen_sink.COMPONENTS_WITH_DEMO_PLATFORM", @@ -30,7 +31,7 @@ async def switch_only() -> None: @pytest.fixture(autouse=True) -async def setup_comp(hass, switch_only): +async def setup_comp(hass: HomeAssistant, switch_only: None) -> None: """Set up demo component.""" assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) await hass.async_block_till_done() @@ -55,7 +56,7 @@ async def test_state( @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_on(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn on method.""" await hass.services.async_call( SWITCH_DOMAIN, @@ -79,7 +80,7 @@ async def test_turn_on(hass: HomeAssistant, switch_entity_id) -> None: @pytest.mark.parametrize("switch_entity_id", SWITCH_ENTITY_IDS) -async def test_turn_off(hass: HomeAssistant, switch_entity_id) -> None: +async def test_turn_off(hass: HomeAssistant, switch_entity_id: str) -> None: """Test switch turn off method.""" await hass.services.async_call( SWITCH_DOMAIN, diff --git a/tests/components/kmtronic/conftest.py b/tests/components/kmtronic/conftest.py index 5dc349508e3..11abd2a4d7b 100644 --- a/tests/components/kmtronic/conftest.py +++ b/tests/components/kmtronic/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for kmtronic tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/knocki/conftest.py b/tests/components/knocki/conftest.py index e1bc2e29cde..2fae89c730d 100644 --- a/tests/components/knocki/conftest.py +++ b/tests/components/knocki/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the 
Knocki tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from knocki import TokenResponse, Trigger import pytest -from typing_extensions import Generator from homeassistant.components.knocki.const import DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/knx/README.md b/tests/components/knx/README.md index 930b9e71c28..8778feb2251 100644 --- a/tests/components/knx/README.md +++ b/tests/components/knx/README.md @@ -24,9 +24,10 @@ All outgoing telegrams are pushed to an assertion queue. Assert them in order th Asserts that no telegram was sent (assertion queue is empty). - `knx.assert_telegram_count(count: int)` Asserts that `count` telegrams were sent. -- `knx.assert_read(group_address: str)` +- `knx.assert_read(group_address: str, response: int | tuple[int, ...] | None = None)` Asserts that a GroupValueRead telegram was sent to `group_address`. The telegram will be removed from the assertion queue. + Optionally inject an incoming GroupValueResponse telegram after reception to clear the value reader's waiting task. This can also be done manually with `knx.receive_response`. - `knx.assert_response(group_address: str, payload: int | tuple[int, ...])` Asserts that a GroupValueResponse telegram with `payload` was sent to `group_address`. The telegram will be removed from the assertion queue. diff --git a/tests/components/knx/__init__.py b/tests/components/knx/__init__.py index eaa84714dc5..76ae91a193d 100644 --- a/tests/components/knx/__init__.py +++ b/tests/components/knx/__init__.py @@ -1 +1,7 @@ """Tests for the KNX integration.""" + +from collections.abc import Awaitable, Callable + +from homeassistant.helpers import entity_registry as er + +KnxEntityGenerator = Callable[..., Awaitable[er.RegistryEntry]] diff --git a/tests/components/knx/conftest.py b/tests/components/knx/conftest.py index cd7146b565b..19f2bc4d845 100644 --- a/tests/components/knx/conftest.py +++ b/tests/components/knx/conftest.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -import json from typing import Any from unittest.mock import DEFAULT, AsyncMock, Mock, patch @@ -30,13 +29,22 @@ from homeassistant.components.knx.const import ( DOMAIN as KNX_DOMAIN, ) from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_fixture +from . import KnxEntityGenerator -FIXTURE_PROJECT_DATA = json.loads(load_fixture("project.json", KNX_DOMAIN)) +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.typing import WebSocketGenerator + +FIXTURE_PROJECT_DATA = load_json_object_fixture("project.json", KNX_DOMAIN) +FIXTURE_CONFIG_STORAGE_DATA = load_json_object_fixture("config_store.json", KNX_DOMAIN) class KNXTestKit: @@ -75,7 +83,7 @@ class KNXTestKit: self.xknx.rate_limit = 0 # set XknxConnectionState.CONNECTED to avoid `unavailable` entities at startup # and start StateUpdater. This would be awaited on normal startup too.
- await self.xknx.connection_manager.connection_state_changed( + self.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_TCP, ) @@ -85,6 +93,7 @@ class KNXTestKit: mock = Mock() mock.start = AsyncMock(side_effect=patch_xknx_start) mock.stop = AsyncMock() + mock.gateway_info = AsyncMock() return mock def fish_xknx(*args, **kwargs): @@ -143,8 +152,6 @@ class KNXTestKit: ) -> None: """Assert outgoing telegram. One by one in timely order.""" await self.xknx.telegrams.join() - await self.hass.async_block_till_done() - await self.hass.async_block_till_done() try: telegram = self._outgoing_telegrams.get_nowait() except asyncio.QueueEmpty as err: @@ -166,9 +173,16 @@ class KNXTestKit: telegram.payload.value.value == payload # type: ignore[attr-defined] ), f"Payload mismatch in {telegram} - Expected: {payload}" - async def assert_read(self, group_address: str) -> None: - """Assert outgoing GroupValueRead telegram. One by one in timely order.""" + async def assert_read( + self, group_address: str, response: int | tuple[int, ...] | None = None + ) -> None: + """Assert outgoing GroupValueRead telegram. One by one in timely order. + + Optionally inject an incoming GroupValueResponse telegram after reception. + """ await self.assert_telegram(group_address, None, GroupValueRead) + if response is not None: + await self.receive_response(group_address, response) async def assert_response( self, group_address: str, payload: int | tuple[int, ...] @@ -232,6 +246,7 @@ class KNXTestKit: GroupValueResponse(payload_value), source=source, ) + await asyncio.sleep(0) # advance loop to allow StateUpdater to process async def receive_write( self, @@ -280,3 +295,53 @@ def load_knxproj(hass_storage: dict[str, Any]) -> None: "version": 1, "data": FIXTURE_PROJECT_DATA, } + + +@pytest.fixture +def load_config_store(hass_storage: dict[str, Any]) -> None: + """Mock KNX config store data.""" + hass_storage[KNX_CONFIG_STORAGE_KEY] = FIXTURE_CONFIG_STORAGE_DATA + + +@pytest.fixture +async def create_ui_entity( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], +) -> KnxEntityGenerator: + """Return a helper to create KNX entities via WS. + + The KNX integration must be set up before using the helper.
+ """ + ws_client = await hass_ws_client(hass) + + async def _create_ui_entity( + platform: Platform, + knx_data: dict[str, Any], + entity_data: dict[str, Any] | None = None, + ) -> er.RegistryEntry: + """Create a KNX entity from WS with given configuration.""" + if entity_data is None: + entity_data = {"name": "Test"} + + await ws_client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": platform, + "data": { + "entity": entity_data, + "knx": knx_data, + }, + } + ) + res = await ws_client.receive_json() + assert res["success"], res + assert res["result"]["success"] is True, res["result"] + entity_id = res["result"]["entity_id"] + + entity = entity_registry.async_get(entity_id) + assert entity + return entity + + return _create_ui_entity diff --git a/tests/components/knx/fixtures/config_store.json b/tests/components/knx/fixtures/config_store.json new file mode 100644 index 00000000000..971b692ade1 --- /dev/null +++ b/tests/components/knx/fixtures/config_store.json @@ -0,0 +1,29 @@ +{ + "version": 1, + "minor_version": 1, + "key": "knx/config_store.json", + "data": { + "entities": { + "switch": { + "knx_es_9d97829f47f1a2a3176a7c5b4216070c": { + "entity": { + "entity_category": null, + "name": "test", + "device_info": "knx_vdev_4c80a564f5fe5da701ed293966d6384d" + }, + "knx": { + "ga_switch": { + "write": "1/1/45", + "state": "1/0/45", + "passive": [] + }, + "invert": false, + "sync_state": true, + "respond_to_read": false + } + } + }, + "light": {} + } + } +} diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index b9216aa149a..dbb8d2ee832 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -2,6 +2,8 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.knx.const import CONF_STATE_ADDRESS, CONF_SYNC_STATE from homeassistant.components.knx.schema import BinarySensorSchema from homeassistant.const import ( @@ -13,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -123,31 +124,30 @@ async def test_binary_sensor_ignore_internal_state( # receive initial ON telegram await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 2 # receive second ON telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 3 # receive first OFF telegram await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 5 # receive second OFF telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 6 -async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_counter( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with context timeout.""" - async_fire_time_changed(hass, dt_util.utcnow()) context_timeout = 1 await knx.setup_integration( @@ -166,21 +166,18 @@ async def test_binary_sensor_counter(hass: 
HomeAssistant, knx: KNXTestKit) -> No # receive initial ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() # no change yet - still in 1 sec context (additional async_block_till_done needed for time change) assert len(events) == 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) - await hass.async_block_till_done() + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - # additional async_block_till_done needed event capture - await hass.async_block_till_done() assert len(events) == 2 event = events.pop(0).data assert event.get("new_state").attributes.get("counter") == 1 @@ -196,9 +193,9 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 @@ -211,10 +208,12 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No assert event.get("old_state").attributes.get("counter") == 2 -async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_reset( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with reset_after function.""" - async_fire_time_changed(hass, dt_util.utcnow()) - await knx.setup_integration( { BinarySensorSchema.PLATFORM: [ @@ -230,11 +229,10 @@ async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None # receive ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) - await hass.async_block_till_done() + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() # state reset after timeout state = hass.states.get("binary_sensor.test") @@ -265,7 +263,6 @@ async def test_binary_sensor_restore_and_respond(hass: HomeAssistant, knx) -> No await knx.assert_telegram_count(0) await knx.receive_write(_ADDRESS, False) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF @@ -296,6 +293,5 @@ async def test_binary_sensor_restore_invert(hass: HomeAssistant, knx) -> None: # inverted is on, make sure the state is off after it await knx.receive_write(_ADDRESS, True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 613208d5595..a05752eced1 100644 --- a/tests/components/knx/test_button.py +++
b/tests/components/knx/test_button.py @@ -3,20 +3,22 @@ from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_button_simple( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX button with default payload.""" await knx.setup_integration( { @@ -38,7 +40,8 @@ async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: # received telegrams on button GA are ignored by the entity old_state = hass.states.get("button.test") - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) + freezer.tick(timedelta(seconds=3)) + async_fire_time_changed(hass) await knx.receive_write("1/2/3", False) await knx.receive_write("1/2/3", True) new_state = hass.states.get("button.test") diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 9c431386b43..77eeeef3559 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -80,12 +80,6 @@ async def test_climate_on_off( ) } ) - - await hass.async_block_till_done() - # read heat/cool state - if heat_cool_ga: - await knx.assert_read("1/2/11") - await knx.receive_response("1/2/11", 0) # cool # read temperature state await knx.assert_read("1/2/3") await knx.receive_response("1/2/3", RAW_FLOAT_20_0) @@ -95,6 +89,10 @@ async def test_climate_on_off( # read on/off state await knx.assert_read("1/2/9") await knx.receive_response("1/2/9", 1) + # read heat/cool state + if heat_cool_ga: + await knx.assert_read("1/2/11") + await knx.receive_response("1/2/11", 0) # cool # turn off await hass.services.async_call( @@ -171,18 +169,15 @@ async def test_climate_hvac_mode( ) } ) - - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # turn hvac mode to off - set_hvac_mode() doesn't send to on_off if dedicated hvac mode is available await hass.services.async_call( @@ -254,17 +249,14 @@ async def test_climate_preset_mode( ) events = async_capture_events(hass, "state_changed") - await hass.async_block_till_done() - # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + await 
knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) events.clear() # set preset mode @@ -294,8 +286,6 @@ async def test_climate_preset_mode( assert len(knx.xknx.devices[1].device_updated_cbs) == 2 # test removing also removes hooks entity_registry.async_remove("climate.test") - await hass.async_block_till_done() - # If we remove the entity the underlying devices should disappear too assert len(knx.xknx.devices) == 0 @@ -315,18 +305,15 @@ async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) assert await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - # StateUpdater semaphore allows 2 concurrent requests await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # verify update entity retriggers group value reads to the bus await hass.services.async_call( @@ -354,8 +341,6 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> } } ) - - await hass.async_block_till_done() # read states state updater await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index f12a57f97ba..a7da2d26600 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -76,10 +76,10 @@ def patch_file_upload(return_value=FIXTURE_KEYRING, side_effect=None): """Patch file upload. 
Yields the Keyring instance (return_value).""" with ( patch( - "homeassistant.components.knx.helpers.keyring.process_uploaded_file" + "homeassistant.components.knx.storage.keyring.process_uploaded_file" ) as file_upload_mock, patch( - "homeassistant.components.knx.helpers.keyring.sync_load_keyring", + "homeassistant.components.knx.storage.keyring.sync_load_keyring", return_value=return_value, side_effect=side_effect, ), @@ -184,7 +184,6 @@ async def test_routing_setup( CONF_KNX_INDIVIDUAL_ADDRESS: "1.1.110", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -259,7 +258,6 @@ async def test_routing_setup_advanced( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -350,7 +348,6 @@ async def test_routing_secure_manual_setup( CONF_KNX_ROUTING_SYNC_LATENCY_TOLERANCE: 2000, }, ) - await hass.async_block_till_done() assert secure_routing_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_routing_manual["title"] == "Secure Routing as 0.0.123" assert secure_routing_manual["data"] == { @@ -419,7 +416,6 @@ async def test_routing_secure_keyfile( CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert routing_secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert routing_secure_knxkeys["title"] == "Secure Routing as 0.0.123" assert routing_secure_knxkeys["data"] == { @@ -552,7 +548,6 @@ async def test_tunneling_setup_manual( result2["flow_id"], user_input, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == title assert result3["data"] == config_entry_data @@ -681,7 +676,6 @@ async def test_tunneling_setup_manual_request_description_error( CONF_PORT: 3671, }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Tunneling TCP @ 192.168.0.1" assert result["data"] == { @@ -772,7 +766,6 @@ async def test_tunneling_setup_for_local_ip( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Tunneling UDP @ 192.168.0.2" assert result3["data"] == { @@ -821,7 +814,6 @@ async def test_tunneling_setup_for_multiple_found_gateways( tunnel_flow["flow_id"], {CONF_KNX_GATEWAY: str(gateway)}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { **DEFAULT_ENTRY_DATA, @@ -905,7 +897,6 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_CONNECTION_TYPE: CONF_KNX_AUTOMATIC, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == CONF_KNX_AUTOMATIC.capitalize() assert result2["data"] == { @@ -1040,7 +1031,6 @@ async def test_configure_secure_tunnel_manual(hass: HomeAssistant, knx_setup) -> CONF_KNX_SECURE_DEVICE_AUTHENTICATION: "device_auth", }, ) - await hass.async_block_till_done() assert secure_tunnel_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_tunnel_manual["data"] == { **DEFAULT_ENTRY_DATA, @@ -1086,7 +1076,6 @@ async def test_configure_secure_knxkeys(hass: HomeAssistant, knx_setup) -> None: {CONF_KNX_TUNNEL_ENDPOINT_IA: CONF_KNX_AUTOMATIC}, ) - await hass.async_block_till_done() assert 
secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert secure_knxkeys["data"] == { **DEFAULT_ENTRY_DATA, @@ -1201,7 +1190,6 @@ async def test_options_flow_connection_type( CONF_KNX_GATEWAY: str(gateway), }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3["data"] assert mock_config_entry.data == { @@ -1307,7 +1295,6 @@ async def test_options_flow_secure_manual_to_keyfile( {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1"}, ) - await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert mock_config_entry.data == { **DEFAULT_ENTRY_DATA, @@ -1352,7 +1339,6 @@ async def test_options_communication_settings( CONF_KNX_TELEGRAM_LOG_SIZE: 3000, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1405,7 +1391,6 @@ async def test_options_update_keyfile(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1463,7 +1448,6 @@ async def test_options_keyfile_upload(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3.get("data") assert mock_config_entry.data == { diff --git a/tests/components/knx/test_config_store.py b/tests/components/knx/test_config_store.py new file mode 100644 index 00000000000..116f4b5d839 --- /dev/null +++ b/tests/components/knx/test_config_store.py @@ -0,0 +1,412 @@ +"""Test KNX config store.""" + +from typing import Any + +import pytest + +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import KnxEntityGenerator +from .conftest import KNXTestKit + +from tests.typing import WebSocketGenerator + + +async def test_create_entity( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_name = "Test no device" + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": test_name}, + ) + + # Test if entity is correctly stored in registry + await client.send_json_auto_id({"type": "knx/get_entity_entries"}) + res = await client.receive_json() + assert res["success"], res + assert res["result"] == [ + test_entity.extended_dict, + ] + # Test if entity is correctly stored in config store + test_storage_data = next( + iter( + hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"].values() + ) + ) + assert test_storage_data == { + "entity": { + "name": test_name, + "device_info": None, + "entity_category": None, + }, + "knx": { + "ga_switch": {"write": "1/2/3", "state": None, "passive": []}, + "invert": False, + "respond_to_read": False, + "sync_state": True, + }, + } + + +async def test_create_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test unsuccessful entity creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + # create entity with invalid platform + await client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": "invalid_platform", + "data": { + "entity": {"name": "Test invalid platform"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert res["result"]["error_base"].startswith("expected Platform or one of") + + # create entity with unsupported platform + await client.send_json_auto_id( + { + "type": "knx/create_entity", + "platform": Platform.TTS, # "tts" is not a supported platform (and is unlikely to ever be) + "data": { + "entity": {"name": "Test invalid platform"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert res["result"]["error_base"].startswith("value must be one of") + + +async def test_update_entity( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity update.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + test_entity_id = test_entity.entity_id + + # update entity + new_name = "Updated name" + new_ga_switch_write = "4/5/6" + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + "entity_id": test_entity_id, + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert 
res["result"]["success"] + + entity = entity_registry.async_get(test_entity_id) + assert entity + assert entity.original_name == new_name + + assert ( + hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"]["switch"][ + test_entity.unique_id + ]["knx"]["ga_switch"]["write"] + == new_ga_switch_write + ) + + +async def test_update_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity update.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + + # update unsupported platform + new_name = "Updated name" + new_ga_switch_write = "4/5/6" + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.TTS, + "entity_id": test_entity.entity_id, + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert not res["result"]["success"] + assert res["result"]["errors"][0]["path"] == ["platform"] + assert res["result"]["error_base"].startswith("value must be one of") + + # entity not found + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + "entity_id": "non_existing_entity_id", + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found:") + + # entity not in storage + await client.send_json_auto_id( + { + "type": "knx/update_entity", + "platform": Platform.SWITCH, + # `sensor` isn't yet supported, but we only have sensor entities automatically + # created with no configuration - it doesn't ,atter for the test though + "entity_id": "sensor.knx_interface_individual_address", + "data": { + "entity": {"name": new_name}, + "knx": {"ga_switch": {"write": new_ga_switch_write}}, + }, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found in storage") + + +async def test_delete_entity( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity deletion.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + test_entity_id = test_entity.entity_id + + # delete entity + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert res["success"], res + + assert not entity_registry.async_get(test_entity_id) + assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") + + +async def test_delete_entity_error( + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + hass_storage: dict[str, Any], +) -> None: + """Test unsuccessful entity deletion.""" + await 
knx.setup_integration({}) + client = await hass_ws_client(hass) + + # delete unknown entity + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": "switch.non_existing_entity", + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found") + + # delete entity not in config store + test_entity_id = "sensor.knx_interface_individual_address" + assert entity_registry.async_get(test_entity_id) + await client.send_json_auto_id( + { + "type": "knx/delete_entity", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith("Entity not found") + + +async def test_get_entity_config( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test entity config retrieval.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + test_entity = await create_ui_entity( + platform=Platform.SWITCH, + knx_data={"ga_switch": {"write": "1/2/3"}}, + entity_data={"name": "Test"}, + ) + + await client.send_json_auto_id( + { + "type": "knx/get_entity_config", + "entity_id": test_entity.entity_id, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["platform"] == Platform.SWITCH + assert res["result"]["data"] == { + "entity": { + "name": "Test", + "device_info": None, + "entity_category": None, + }, + "knx": { + "ga_switch": {"write": "1/2/3", "passive": [], "state": None}, + "respond_to_read": False, + "invert": False, + "sync_state": True, + }, + } + + +@pytest.mark.parametrize( + ("test_entity_id", "error_message_start"), + [ + ("switch.non_existing_entity", "Entity not found"), + ("sensor.knx_interface_individual_address", "Entity data not found"), + ], +) +async def test_get_entity_config_error( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + test_entity_id: str, + error_message_start: str, +) -> None: + """Test entity config retrieval errors.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "knx/get_entity_config", + "entity_id": test_entity_id, + } + ) + res = await client.receive_json() + assert not res["success"], res + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"].startswith(error_message_start) + + +async def test_validate_entity( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test entity validation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "knx/validate_entity", + "platform": Platform.SWITCH, + "data": { + "entity": {"name": "test_name"}, + "knx": {"ga_switch": {"write": "1/2/3"}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["success"] is True + + # invalid data + await client.send_json_auto_id( + { + "type": "knx/validate_entity", + "platform": Platform.SWITCH, + "data": { + "entity": {"name": "test_name"}, + "knx": {"ga_switch": {}}, + }, + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["success"] is False + assert res["result"]["errors"][0]["path"] == ["data", "knx", 
"ga_switch", "write"] + assert res["result"]["errors"][0]["error_message"] == "required key not provided" + assert res["result"]["error_base"].startswith("required key not provided") diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py index c8c6bd4f346..4b66769a8a3 100644 --- a/tests/components/knx/test_datetime.py +++ b/tests/components/knx/test_datetime.py @@ -34,7 +34,8 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.assert_write( test_address, - (0x78, 0x01, 0x01, 0x73, 0x04, 0x05, 0x20, 0x80), + # service call in UTC, telegram in local time + (0x78, 0x01, 0x01, 0x13, 0x04, 0x05, 0x24, 0x00), ) state = hass.states.get("datetime.test") assert state.state == "2020-01-02T03:04:05+00:00" @@ -74,7 +75,7 @@ async def test_date_restore_and_respond(hass: HomeAssistant, knx: KNXTestKit) -> await knx.receive_read(test_address) await knx.assert_response( test_address, - (0x7A, 0x03, 0x03, 0x84, 0x04, 0x05, 0x20, 0x80), + (0x7A, 0x03, 0x03, 0x04, 0x04, 0x05, 0x24, 0x00), ) # don't respond to passive address diff --git a/tests/components/knx/test_device.py b/tests/components/knx/test_device.py new file mode 100644 index 00000000000..330fd854a50 --- /dev/null +++ b/tests/components/knx/test_device.py @@ -0,0 +1,77 @@ +"""Test KNX devices.""" + +from typing import Any + +from homeassistant.components.knx.const import DOMAIN +from homeassistant.components.knx.storage.config_store import ( + STORAGE_KEY as KNX_CONFIG_STORAGE_KEY, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from .conftest import KNXTestKit + +from tests.typing import WebSocketGenerator + + +async def test_create_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test device creation.""" + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await client.send_json_auto_id( + { + "type": "knx/create_device", + "name": "Test Device", + } + ) + res = await client.receive_json() + assert res["success"], res + assert res["result"]["name"] == "Test Device" + assert res["result"]["manufacturer"] == "KNX" + assert res["result"]["identifiers"] + assert res["result"]["config_entries"][0] == knx.mock_config_entry.entry_id + + device_identifier = res["result"]["identifiers"][0][1] + assert device_registry.async_get_device({(DOMAIN, device_identifier)}) + device_id = res["result"]["id"] + assert device_registry.async_get(device_id) + + +async def test_remove_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + hass_ws_client: WebSocketGenerator, + load_config_store: None, + hass_storage: dict[str, Any], +) -> None: + """Test device removal.""" + assert await async_setup_component(hass, "config", {}) + await knx.setup_integration({}) + client = await hass_ws_client(hass) + + await knx.assert_read("1/0/45", response=True) + + assert hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") + test_device = device_registry.async_get_device( + {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} + ) + device_id = test_device.id + device_entities = entity_registry.entities.get_entries_for_device_id(device_id) + assert len(device_entities) == 1 + + response = await client.remove_device(device_id, knx.mock_config_entry.entry_id) + assert 
response["success"] + assert not device_registry.async_get_device( + {(DOMAIN, "knx_vdev_4c80a564f5fe5da701ed293966d6384d")} + ) + assert not entity_registry.entities.get_entries_for_device_id(device_id) + assert not hass_storage[KNX_CONFIG_STORAGE_KEY]["data"]["entities"].get("switch") diff --git a/tests/components/knx/test_device_trigger.py b/tests/components/knx/test_device_trigger.py index 136dddefaab..e5f776a9404 100644 --- a/tests/components/knx/test_device_trigger.py +++ b/tests/components/knx/test_device_trigger.py @@ -18,18 +18,12 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_if_fires_on_telegram( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -98,31 +92,31 @@ async def test_if_fires_on_telegram( # "specific" shall ignore destination address await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 async def test_default_if_fires_on_telegram( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -179,34 +173,34 @@ async def test_default_if_fires_on_telegram( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall catch GroupValueRead as it is not set explicitly await knx.receive_read("1/2/4") - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert 
test_call.data["id"] == 0 async def test_remove_device_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, knx: KNXTestKit, ) -> None: @@ -241,8 +235,8 @@ async def test_remove_device_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -250,8 +244,10 @@ async def test_remove_device_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) + assert len(service_calls) == 1 + await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_get_triggers( @@ -395,7 +391,6 @@ async def test_invalid_device_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. Got None" diff --git a/tests/components/knx/test_events.py b/tests/components/knx/test_events.py index ddb9d50240c..2228781ba89 100644 --- a/tests/components/knx/test_events.py +++ b/tests/components/knx/test_events.py @@ -31,7 +31,6 @@ async def test_knx_event( events = async_capture_events(hass, "knx_event") async def test_event_data(address, payload, value=None): - await hass.async_block_till_done() assert len(events) == 1 event = events.pop() assert event.data["data"] == payload @@ -69,7 +68,6 @@ async def test_knx_event( ) # no event received - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams for group addresses matching the filter @@ -101,7 +99,6 @@ async def test_knx_event( await knx.receive_write("0/5/0", True) await knx.receive_write("1/7/0", True) await knx.receive_write("2/6/6", True) - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams with wrong payload length diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index e0b4c78e322..c4d0acf0ce2 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -1,9 +1,9 @@ """Test KNX expose.""" from datetime import timedelta -import time -from unittest.mock import patch +from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -15,11 +15,10 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit -from tests.common import async_fire_time_changed_exact +from tests.common import async_fire_time_changed async def test_binary_expose(hass: HomeAssistant, knx: KNXTestKit) -> None: @@ -207,7 +206,9 @@ async def test_expose_string(hass: HomeAssistant, knx: KNXTestKit) -> None: ) -async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_expose_cooldown( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test an expose with cooldown.""" cooldown_time = 2 entity_id = "fake.entity" @@ -235,9 +236,8 @@ async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_no_telegram() # Wait for cooldown to pass - async_fire_time_changed_exact( - hass, dt_util.utcnow() + 
timedelta(seconds=cooldown_time) - ) + freezer.tick(timedelta(seconds=cooldown_time)) + async_fire_time_changed(hass) await hass.async_block_till_done() await knx.assert_write("1/1/8", (3,)) @@ -327,25 +327,32 @@ async def test_expose_conversion_exception( ) -@patch("time.localtime") +@freeze_time("2022-1-7 9:13:14") +@pytest.mark.parametrize( + ("time_type", "raw"), + [ + ("time", (0xA9, 0x0D, 0x0E)), # localtime includes day of week + ("date", (0x07, 0x01, 0x16)), + ("datetime", (0x7A, 0x1, 0x7, 0xA9, 0xD, 0xE, 0x20, 0xC0)), + ], +) async def test_expose_with_date( - localtime, hass: HomeAssistant, knx: KNXTestKit + hass: HomeAssistant, knx: KNXTestKit, time_type: str, raw: tuple[int, ...] ) -> None: """Test an expose with a date.""" - localtime.return_value = time.struct_time([2022, 1, 7, 9, 13, 14, 6, 0, 0]) await knx.setup_integration( { CONF_KNX_EXPOSE: { - CONF_TYPE: "datetime", + CONF_TYPE: time_type, KNX_ADDRESS: "1/1/8", } } ) - await knx.assert_write("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_write("1/1/8", raw) await knx.receive_read("1/1/8") - await knx.assert_response("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_response("1/1/8", raw) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 diff --git a/tests/components/knx/test_init.py b/tests/components/knx/test_init.py index a317a6a298c..48cc46ef1ee 100644 --- a/tests/components/knx/test_init.py +++ b/tests/components/knx/test_init.py @@ -284,7 +284,6 @@ async def test_async_remove_entry( assert await hass.config_entries.async_remove(config_entry.entry_id) assert unlink_mock.call_count == 3 rmdir_mock.assert_called_once() - await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index 6cf5d8026b9..79114d4ffd5 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -1,23 +1,28 @@ -"""Test KNX scene.""" +"""Test KNX interface device.""" from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState, XknxConnectionType from xknx.telegram import IndividualAddress from homeassistant.components.knx.sensor import SCAN_INTERVAL from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed +from tests.typing import WebSocketGenerator async def test_diagnostic_entities( - hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test diagnostic entities.""" await knx.setup_integration({}) @@ -48,7 +53,8 @@ async def test_diagnostic_entities( knx.xknx.connection_manager.cemi_count_outgoing_error = 2 events = async_capture_events(hass, "state_changed") - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert len(events) == 3 # 5 polled 
sensors - 2 disabled @@ -64,25 +70,19 @@ async def test_diagnostic_entities( ): assert hass.states.get(entity_id).state == test_state - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.DISCONNECTED ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 4 # 3 not always_available + 3 force_update - 2 disabled events.clear() knx.xknx.current_address = IndividualAddress("1.1.1") - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_UDP, ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 6 # all diagnostic sensors - counters are reset on connect for entity_id, test_state in ( @@ -109,5 +109,29 @@ async def test_removed_entity( "sensor.knx_interface_connection_established", disabled_by=er.RegistryEntryDisabler.USER, ) - await hass.async_block_till_done() unregister_mock.assert_called_once() + + +async def test_remove_interface_device( + hass: HomeAssistant, + knx: KNXTestKit, + device_registry: dr.DeviceRegistry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test device removal.""" + assert await async_setup_component(hass, "config", {}) + await knx.setup_integration({}) + client = await hass_ws_client(hass) + knx_devices = device_registry.devices.get_devices_for_config_entry_id( + knx.mock_config_entry.entry_id + ) + assert len(knx_devices) == 1 + assert knx_devices[0].name == "KNX Interface" + device_id = knx_devices[0].id + # interface device can't be removed + res = await client.remove_device(device_id, knx.mock_config_entry.entry_id) + assert not res["success"] + assert ( + res["error"]["message"] + == "Failed to remove device entry, rejected by integration" + ) diff --git a/tests/components/knx/test_knx_selectors.py b/tests/components/knx/test_knx_selectors.py new file mode 100644 index 00000000000..432a0fb9f80 --- /dev/null +++ b/tests/components/knx/test_knx_selectors.py @@ -0,0 +1,122 @@ +"""Test KNX selectors.""" + +import pytest +import voluptuous as vol + +from homeassistant.components.knx.const import ColorTempModes +from homeassistant.components.knx.storage.knx_selector import GASelector + +INVALID = "invalid" + + +@pytest.mark.parametrize( + ("selector_config", "data", "expected"), + [ + ( + {}, + {}, + {"write": None, "state": None, "passive": []}, + ), + ( + {}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None, "passive": []}, + ), + ( + {}, + {"state": "1/2/3"}, + {"write": None, "state": "1/2/3", "passive": []}, + ), + ( + {}, + {"passive": ["1/2/3"]}, + {"write": None, "state": None, "passive": ["1/2/3"]}, + ), + ( + {}, + {"write": "1", "state": 2, "passive": ["1/2/3"]}, + {"write": "1", "state": 2, "passive": ["1/2/3"]}, + ), + ( + {"write": False}, + {"write": "1/2/3"}, + {"state": None, "passive": []}, + ), + ( + {"write": False}, + {"state": "1/2/3"}, + {"state": "1/2/3", "passive": []}, + ), + ( + {"write": False}, + {"passive": ["1/2/3"]}, + {"state": None, "passive": ["1/2/3"]}, + ), + ( + {"passive": False}, + {"passive": ["1/2/3"]}, + {"write": None, "state": None}, + ), + ( + {"passive": False}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None}, + ), + # required keys + ( + 
{"write_required": True}, + {}, + INVALID, + ), + ( + {"state_required": True}, + {}, + INVALID, + ), + ( + {"write_required": True}, + {"write": "1/2/3"}, + {"write": "1/2/3", "state": None, "passive": []}, + ), + ( + {"state_required": True}, + {"state": "1/2/3"}, + {"write": None, "state": "1/2/3", "passive": []}, + ), + ( + {"write_required": True}, + {"state": "1/2/3"}, + INVALID, + ), + ( + {"state_required": True}, + {"write": "1/2/3"}, + INVALID, + ), + # dpt key + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3"}, + INVALID, + ), + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3", "dpt": "7.600"}, + {"write": "1/2/3", "state": None, "passive": [], "dpt": "7.600"}, + ), + ( + {"dpt": ColorTempModes}, + {"write": "1/2/3", "state": None, "passive": [], "dpt": "invalid"}, + INVALID, + ), + ], +) +def test_ga_selector(selector_config, data, expected): + """Test GASelector.""" + selector = GASelector(**selector_config) + if expected == INVALID: + with pytest.raises(vol.Invalid): + selector(data) + else: + result = selector(data) + assert result == expected diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index a14d1bb32ae..04f849bb555 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -4,10 +4,11 @@ from __future__ import annotations from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight -from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS +from homeassistant.components.knx.const import CONF_STATE_ADDRESS, KNX_ADDRESS, Platform from homeassistant.components.knx.schema import LightSchema from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -19,8 +20,8 @@ from homeassistant.components.light import ( ) from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util +from . 
import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import async_fire_time_changed @@ -91,9 +92,7 @@ async def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # StateUpdater initialize state await knx.assert_read(test_brightness_state) - await knx.xknx.connection_manager.connection_state_changed( - XknxConnectionState.CONNECTED - ) + knx.xknx.connection_manager.connection_state_changed(XknxConnectionState.CONNECTED) # turn on light via brightness await hass.services.async_call( "light", @@ -644,7 +643,9 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non await knx.assert_write(test_blue, (45,)) -async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_light_rgbw_individual( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX light with rgbw color in individual GAs.""" test_red = "1/1/3" test_red_state = "1/1/4" @@ -764,9 +765,8 @@ async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> No await knx.receive_write(test_green, (0,)) # # individual color debounce takes 0.2 seconds if not all 4 addresses received knx.assert_state("light.test", STATE_ON) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT) - ) + freezer.tick(timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() knx.assert_state("light.test", STATE_OFF) # turn ON from KNX @@ -1151,3 +1151,26 @@ async def test_light_rgbw_brightness(hass: HomeAssistant, knx: KNXTestKit) -> No knx.assert_state( "light.test", STATE_ON, brightness=50, rgbw_color=(100, 200, 55, 12) ) + + +async def test_light_ui_create( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a light.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "_light_color_mode_schema": "default", + "sync_state": True, + }, + ) + # created entity sends read-request to KNX bus + await knx.assert_read("2/2/2") + await knx.receive_response("2/2/2", True) + state = hass.states.get("light.test") + assert state.state is STATE_ON diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index 94f2d579fc8..b481675140b 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -21,17 +21,13 @@ async def test_legacy_notify_service_simple( } } ) - await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), ) - await hass.services.async_call( "notify", "notify", @@ -41,7 +37,6 @@ async def test_legacy_notify_service_simple( }, blocking=True, ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), @@ -68,12 +63,9 @@ async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodi ] } ) - await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True ) - await knx.assert_write( "1/0/0", # "G?nsef??chen" @@ -95,7 +87,6 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } } ) - await
hass.services.async_call( notify.DOMAIN, notify.SERVICE_SEND_MESSAGE, diff --git a/tests/components/knx/test_sensor.py b/tests/components/knx/test_sensor.py index 22d9993b58f..41ffcfcb5c7 100644 --- a/tests/components/knx/test_sensor.py +++ b/tests/components/knx/test_sensor.py @@ -68,25 +68,21 @@ async def test_always_callback(hass: HomeAssistant, knx: KNXTestKit) -> None: # receive initial telegram await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await hass.async_block_till_done() assert len(events) == 2 # receive second telegram with identical payload # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await hass.async_block_till_done() assert len(events) == 3 # receive telegram with different payload await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 5 # receive telegram with second payload again # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 6 diff --git a/tests/components/knx/test_services.py b/tests/components/knx/test_services.py index 7f748af5ceb..f70389dbc92 100644 --- a/tests/components/knx/test_services.py +++ b/tests/components/knx/test_services.py @@ -154,7 +154,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: # no event registered await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event with `type` @@ -165,7 +164,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, (0x04, 0xD2)) - await hass.async_block_till_done() assert len(events) == 1 typed_event = events.pop() assert typed_event.data["data"] == (0x04, 0xD2) @@ -179,7 +177,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event without `type` @@ -188,7 +185,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.receive_write(test_address, True) await knx.receive_write(test_address, False) - await hass.async_block_till_done() assert len(events) == 2 untyped_event_2 = events.pop() assert untyped_event_2.data["data"] is False diff --git a/tests/components/knx/test_switch.py b/tests/components/knx/test_switch.py index 8dce4cf9c27..bc0a6b27675 100644 --- a/tests/components/knx/test_switch.py +++ b/tests/components/knx/test_switch.py @@ -6,9 +6,10 @@ from homeassistant.components.knx.const import ( KNX_ADDRESS, ) from homeassistant.components.knx.schema import SwitchSchema -from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON +from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant, State +from . 
import KnxEntityGenerator from .conftest import KNXTestKit from tests.common import mock_restore_cache @@ -146,3 +147,27 @@ async def test_switch_restore_and_respond(hass: HomeAssistant, knx) -> None: # respond to new state await knx.receive_read(_ADDRESS) await knx.assert_response(_ADDRESS, False) + + +async def test_switch_ui_create( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, +) -> None: + """Test creating a switch.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.SWITCH, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "respond_to_read": True, + "sync_state": True, + "invert": False, + }, + ) + # created entity sends read-request to KNX bus + await knx.assert_read("2/2/2") + await knx.receive_response("2/2/2", True) + state = hass.states.get("switch.test") + assert state.state is STATE_ON diff --git a/tests/components/knx/test_trigger.py b/tests/components/knx/test_trigger.py index d957082de18..73e8b10840e 100644 --- a/tests/components/knx/test_trigger.py +++ b/tests/components/knx/test_trigger.py @@ -11,18 +11,10 @@ from homeassistant.setup import async_setup_component from .conftest import KNXTestKit -from tests.common import async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - async def test_telegram_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test telegram triggers firing.""" @@ -73,24 +65,24 @@ async def test_telegram_trigger( # "specific" shall ignore destination address await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["id"] == 0 await knx.receive_write("1/2/4", (0x03, 0x2F)) - assert len(calls) == 2 - test_call = calls.pop() + assert len(service_calls) == 2 + test_call = service_calls.pop() assert test_call.data["specific"] == "telegram - 1/2/4" assert test_call.data["id"] == "test-id" - test_call = calls.pop() + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 # "specific" shall ignore GroupValueRead await knx.receive_read("1/2/4") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 1/2/4" assert test_call.data["id"] == 0 @@ -105,7 +97,7 @@ async def test_telegram_trigger( ) async def test_telegram_trigger_dpt_option( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, payload: tuple[int, ...], type_option: dict[str, bool], @@ -138,16 +130,16 @@ async def test_telegram_trigger_dpt_option( ) await knx.receive_write("0/0/1", payload) - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 0/0/1" assert test_call.data["trigger"]["value"] == expected_value assert test_call.data["trigger"]["unit"] == expected_unit await knx.receive_read("0/0/1") - assert len(calls) == 1 - test_call = calls.pop() + assert len(service_calls) == 1 + test_call = service_calls.pop() assert test_call.data["catch_all"] == "telegram - 
0/0/1" assert test_call.data["trigger"]["value"] is None assert test_call.data["trigger"]["unit"] is None @@ -192,7 +184,7 @@ async def test_telegram_trigger_dpt_option( ) async def test_telegram_trigger_options( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, group_value_options: dict[str, bool], direction_options: dict[str, bool], @@ -225,28 +217,28 @@ async def test_telegram_trigger_options( if group_value_options.get("group_value_write", True) and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await knx.receive_response("0/0/1", 1) if group_value_options["group_value_response"] and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await knx.receive_read("0/0/1") if group_value_options["group_value_read"] and direction_options.get( "incoming", True ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 0 await hass.services.async_call( "knx", @@ -254,20 +246,22 @@ async def test_telegram_trigger_options( {"address": "0/0/1", "payload": True}, blocking=True, ) + assert len(service_calls) == 1 + await knx.assert_write("0/0/1", True) if ( group_value_options.get("group_value_write", True) and direction_options["outgoing"] ): - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 2 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" else: - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_remove_telegram_trigger( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], knx: KNXTestKit, ) -> None: """Test for removed callback when telegram trigger not used.""" @@ -296,8 +290,8 @@ async def test_remove_telegram_trigger( ) await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 1 - assert calls.pop().data["catch_all"] == "telegram - 0/0/1" + assert len(service_calls) == 1 + assert service_calls.pop().data["catch_all"] == "telegram - 0/0/1" await hass.services.async_call( automation.DOMAIN, @@ -305,8 +299,10 @@ async def test_remove_telegram_trigger( {ATTR_ENTITY_ID: f"automation.{automation_name}"}, blocking=True, ) + assert len(service_calls) == 1 + await knx.receive_write("0/0/1", (0x03, 0x2F)) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_invalid_trigger( @@ -338,7 +334,6 @@ async def test_invalid_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. 
Got None" diff --git a/tests/components/knx/test_weather.py b/tests/components/knx/test_weather.py index 0adcc309252..5ba38d6cdf8 100644 --- a/tests/components/knx/test_weather.py +++ b/tests/components/knx/test_weather.py @@ -45,12 +45,12 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # brightness await knx.assert_read("1/1/6") - await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.assert_read("1/1/8") + await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.receive_response("1/1/8", (0x7C, 0x5E)) + await knx.assert_read("1/1/5") await knx.assert_read("1/1/7") await knx.receive_response("1/1/7", (0x7C, 0x5E)) - await knx.assert_read("1/1/5") await knx.receive_response("1/1/5", (0x7C, 0x5E)) # wind speed @@ -64,10 +64,10 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # alarms await knx.assert_read("1/1/2") await knx.receive_response("1/1/2", False) - await knx.assert_read("1/1/3") - await knx.receive_response("1/1/3", False) await knx.assert_read("1/1/1") + await knx.assert_read("1/1/3") await knx.receive_response("1/1/1", False) + await knx.receive_response("1/1/3", False) # day night await knx.assert_read("1/1/12") diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index ca60905b0ba..309ea111709 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -4,6 +4,7 @@ from typing import Any from unittest.mock import patch from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema +from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant @@ -87,6 +88,7 @@ async def test_knx_project_file_process( assert res["success"], res assert hass.data[DOMAIN].project.loaded + assert hass_storage[KNX_PROJECT_STORAGE_KEY]["data"] == _parse_result async def test_knx_project_file_process_error( @@ -126,19 +128,20 @@ async def test_knx_project_file_remove( knx: KNXTestKit, hass_ws_client: WebSocketGenerator, load_knxproj: None, + hass_storage: dict[str, Any], ) -> None: """Test knx/project_file_remove command.""" await knx.setup_integration({}) + assert hass_storage[KNX_PROJECT_STORAGE_KEY] client = await hass_ws_client(hass) assert hass.data[DOMAIN].project.loaded await client.send_json({"id": 6, "type": "knx/project_file_remove"}) - with patch("homeassistant.helpers.storage.Store.async_remove") as remove_mock: - res = await client.receive_json() - remove_mock.assert_called_once_with() + res = await client.receive_json() assert res["success"], res assert not hass.data[DOMAIN].project.loaded + assert not hass_storage.get(KNX_PROJECT_STORAGE_KEY) async def test_knx_get_project( @@ -343,7 +346,7 @@ async def test_knx_subscribe_telegrams_command_project( assert res["event"]["destination"] == "0/1/1" assert res["event"]["destination_name"] == "percent" assert res["event"]["payload"] == 1 - assert res["event"]["value"] == "Error decoding value" + assert res["event"]["value"] is None assert res["event"]["telegramtype"] == "GroupValueWrite" assert res["event"]["source"] == "1.1.6" assert ( diff --git a/tests/components/kodi/test_config_flow.py b/tests/components/kodi/test_config_flow.py index d570654be93..ad99067ac7a 100644 --- a/tests/components/kodi/test_config_flow.py +++ b/tests/components/kodi/test_config_flow.py @@ -30,7 +30,7 @@ from tests.common import MockConfigEntry @pytest.fixture -async def user_flow(hass): +async def 
user_flow(hass: HomeAssistant) -> str: """Return a user-initiated flow after filling in host info.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -41,7 +41,7 @@ async def user_flow(hass): return result["flow_id"] -async def test_user_flow(hass: HomeAssistant, user_flow) -> None: +async def test_user_flow(hass: HomeAssistant, user_flow: str) -> None: """Test a successful user initiated flow.""" with ( patch( @@ -74,7 +74,7 @@ async def test_user_flow(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_auth(hass: HomeAssistant, user_flow) -> None: +async def test_form_valid_auth(hass: HomeAssistant, user_flow: str) -> None: """Test we handle valid auth.""" with ( patch( @@ -124,7 +124,7 @@ async def test_form_valid_auth(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_valid_ws_port(hass: HomeAssistant, user_flow) -> None: +async def test_form_valid_ws_port(hass: HomeAssistant, user_flow: str) -> None: """Test we handle valid websocket port.""" with ( patch( @@ -180,7 +180,7 @@ async def test_form_valid_ws_port(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_empty_ws_port(hass: HomeAssistant, user_flow) -> None: +async def test_form_empty_ws_port(hass: HomeAssistant, user_flow: str) -> None: """Test we handle an empty websocket port input.""" with ( patch( @@ -226,7 +226,7 @@ async def test_form_empty_ws_port(hass: HomeAssistant, user_flow) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_auth(hass: HomeAssistant, user_flow) -> None: +async def test_form_invalid_auth(hass: HomeAssistant, user_flow: str) -> None: """Test we handle invalid auth.""" with ( patch( @@ -322,7 +322,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {} -async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow) -> None: +async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow: str) -> None: """Test we handle cannot connect over HTTP error.""" with ( patch( @@ -341,7 +341,7 @@ async def test_form_cannot_connect_http(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "cannot_connect"} -async def test_form_exception_http(hass: HomeAssistant, user_flow) -> None: +async def test_form_exception_http(hass: HomeAssistant, user_flow: str) -> None: """Test we handle generic exception over HTTP.""" with ( patch( @@ -360,7 +360,7 @@ async def test_form_exception_http(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "unknown"} -async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow) -> None: +async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow: str) -> None: """Test we handle cannot connect over WebSocket error.""" with ( patch( @@ -423,7 +423,7 @@ async def test_form_cannot_connect_ws(hass: HomeAssistant, user_flow) -> None: assert result["errors"] == {"base": "cannot_connect"} -async def test_form_exception_ws(hass: HomeAssistant, user_flow) -> None: +async def test_form_exception_ws(hass: HomeAssistant, user_flow: str) -> None: """Test we handle generic exception over WebSocket.""" with ( patch( @@ -560,7 +560,7 @@ async def test_discovery_cannot_connect_ws(hass: HomeAssistant) -> None: assert result["errors"] == {} -async def test_discovery_exception_http(hass: HomeAssistant, user_flow) 
-> None: +async def test_discovery_exception_http(hass: HomeAssistant) -> None: """Test we handle generic exception during discovery validation.""" with ( patch( diff --git a/tests/components/kodi/test_device_trigger.py b/tests/components/kodi/test_device_trigger.py index d3de349018e..a54641a4234 100644 --- a/tests/components/kodi/test_device_trigger.py +++ b/tests/components/kodi/test_device_trigger.py @@ -12,11 +12,7 @@ from homeassistant.setup import async_setup_component from . import init_integration -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -25,13 +21,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -async def kodi_media_player(hass): +async def kodi_media_player(hass: HomeAssistant) -> str: """Get a kodi media player.""" await init_integration(hass) return f"{MP_DOMAIN}.name" @@ -77,8 +67,8 @@ async def test_get_triggers( async def test_if_fires_on_state_change( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], - kodi_media_player, + service_calls: list[ServiceCall], + kodi_media_player: str, ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -135,8 +125,8 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" await hass.services.async_call( MP_DOMAIN, @@ -146,15 +136,15 @@ async def test_if_fires_on_state_change( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == f"turn_off - {kodi_media_player} - 0" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == f"turn_off - {kodi_media_player} - 0" async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], - kodi_media_player, + service_calls: list[ServiceCall], + kodi_media_player: str, ) -> None: """Test for turn_on and turn_off triggers firing.""" entry = entity_registry.async_get(kodi_media_player) @@ -194,5 +184,5 @@ async def test_if_fires_on_state_change_legacy( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == f"turn_on - {kodi_media_player} - 0" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == f"turn_on - {kodi_media_player} - 0" diff --git a/tests/components/kostal_plenticore/conftest.py b/tests/components/kostal_plenticore/conftest.py index af958f19f3a..acce8ebed7a 100644 --- a/tests/components/kostal_plenticore/conftest.py +++ b/tests/components/kostal_plenticore/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import MeData, VersionData import pytest -from typing_extensions import Generator from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant diff --git 
a/tests/components/kostal_plenticore/test_config_flow.py b/tests/components/kostal_plenticore/test_config_flow.py index c982e2af818..bd9b9ad278d 100644 --- a/tests/components/kostal_plenticore/test_config_flow.py +++ b/tests/components/kostal_plenticore/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Kostal Plenticore Solar Inverter config flow.""" +from collections.abc import Generator from unittest.mock import ANY, AsyncMock, MagicMock, patch from pykoplenti import ApiClient, AuthenticationException, SettingsData import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.kostal_plenticore.const import DOMAIN diff --git a/tests/components/kostal_plenticore/test_diagnostics.py b/tests/components/kostal_plenticore/test_diagnostics.py index 1c3a9efe2e5..0f358260be7 100644 --- a/tests/components/kostal_plenticore/test_diagnostics.py +++ b/tests/components/kostal_plenticore/test_diagnostics.py @@ -6,7 +6,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.kostal_plenticore.coordinator import Plenticore from homeassistant.core import HomeAssistant -from tests.common import MockConfigEntry +from tests.common import ANY, MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -54,6 +54,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": None, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "client": { "version": "api_version='0.2.0' hostname='scb' name='PUCK RESTful API' sw_version='01.16.05025'", diff --git a/tests/components/kostal_plenticore/test_helper.py b/tests/components/kostal_plenticore/test_helper.py index a18cf32c5a1..acd33f82a27 100644 --- a/tests/components/kostal_plenticore/test_helper.py +++ b/tests/components/kostal_plenticore/test_helper.py @@ -1,10 +1,10 @@ """Test Kostal Plenticore helper.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pykoplenti import ApiClient, ExtendedApiClient, SettingsData import pytest -from typing_extensions import Generator from homeassistant.components.kostal_plenticore.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/kostal_plenticore/test_number.py b/tests/components/kostal_plenticore/test_number.py index 9d94c6f9951..586129c486d 100644 --- a/tests/components/kostal_plenticore/test_number.py +++ b/tests/components/kostal_plenticore/test_number.py @@ -1,11 +1,11 @@ """Test Kostal Plenticore number.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from pykoplenti import ApiClient, SettingsData import pytest -from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, diff --git a/tests/components/kulersky/test_light.py b/tests/components/kulersky/test_light.py index 90f40d327e4..a2245e721c5 100644 --- a/tests/components/kulersky/test_light.py +++ b/tests/components/kulersky/test_light.py @@ -1,5 +1,6 @@ """Test the Kuler Sky lights.""" +from collections.abc import AsyncGenerator from unittest.mock import MagicMock, patch import pykulersky @@ -37,13 +38,15 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry(hass): +async def mock_entry() -> MockConfigEntry: """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass, 
mock_entry): +async def mock_light( + hass: HomeAssistant, mock_entry: MockConfigEntry +) -> AsyncGenerator[MagicMock]: """Create a mock light entity.""" light = MagicMock(spec=pykulersky.Light) @@ -64,7 +67,7 @@ async def mock_light(hass, mock_entry): yield light -async def test_init(hass: HomeAssistant, mock_light) -> None: +async def test_init(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test platform setup.""" state = hass.states.get("light.bedroom") assert state.state == STATE_OFF @@ -87,7 +90,9 @@ async def test_init(hass: HomeAssistant, mock_light) -> None: assert mock_light.disconnect.called -async def test_remove_entry(hass: HomeAssistant, mock_light, mock_entry) -> None: +async def test_remove_entry( + hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry +) -> None: """Test platform setup.""" assert hass.data[DOMAIN][DATA_ADDRESSES] == {"AA:BB:CC:11:22:33"} assert DATA_DISCOVERY_SUBSCRIPTION in hass.data[DOMAIN] @@ -99,7 +104,7 @@ async def test_remove_entry(hass: HomeAssistant, mock_light, mock_entry) -> None async def test_remove_entry_exceptions_caught( - hass: HomeAssistant, mock_light, mock_entry + hass: HomeAssistant, mock_light: MagicMock, mock_entry: MockConfigEntry ) -> None: """Assert that disconnect exceptions are caught.""" mock_light.disconnect.side_effect = pykulersky.PykulerskyException("Mock error") @@ -108,7 +113,7 @@ async def test_remove_entry_exceptions_caught( assert mock_light.disconnect.called -async def test_update_exception(hass: HomeAssistant, mock_light) -> None: +async def test_update_exception(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test platform setup.""" mock_light.get_color.side_effect = pykulersky.PykulerskyException @@ -118,7 +123,7 @@ async def test_update_exception(hass: HomeAssistant, mock_light) -> None: assert state.state == STATE_UNAVAILABLE -async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: +async def test_light_turn_on(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight turn_on.""" mock_light.get_color.return_value = (255, 255, 255, 255) await hass.services.async_call( @@ -175,7 +180,7 @@ async def test_light_turn_on(hass: HomeAssistant, mock_light) -> None: mock_light.set_color.assert_called_with(50, 41, 0, 50) -async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: +async def test_light_turn_off(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight turn_on.""" mock_light.get_color.return_value = (0, 0, 0, 0) await hass.services.async_call( @@ -188,7 +193,7 @@ async def test_light_turn_off(hass: HomeAssistant, mock_light) -> None: mock_light.set_color.assert_called_with(0, 0, 0, 0) -async def test_light_update(hass: HomeAssistant, mock_light) -> None: +async def test_light_update(hass: HomeAssistant, mock_light: MagicMock) -> None: """Test KulerSkyLight update.""" utcnow = dt_util.utcnow() diff --git a/tests/components/lacrosse_view/conftest.py b/tests/components/lacrosse_view/conftest.py index a6294c64210..4f1bfdc5748 100644 --- a/tests/components/lacrosse_view/conftest.py +++ b/tests/components/lacrosse_view/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for LaCrosse View tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lacrosse_view/test_diagnostics.py b/tests/components/lacrosse_view/test_diagnostics.py index 08cef64a935..dc48f160113 100644 --- 
a/tests/components/lacrosse_view/test_diagnostics.py +++ b/tests/components/lacrosse_view/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.lacrosse_view import DOMAIN from homeassistant.core import HomeAssistant @@ -32,7 +33,6 @@ async def test_entry_diagnostics( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/lamarzocco/conftest.py b/tests/components/lamarzocco/conftest.py index 6741ac0797c..1a4fbbd4a0c 100644 --- a/tests/components/lamarzocco/conftest.py +++ b/tests/components/lamarzocco/conftest.py @@ -1,6 +1,6 @@ """Lamarzocco session fixtures.""" -from collections.abc import Callable +from collections.abc import Generator import json from unittest.mock import MagicMock, patch @@ -9,7 +9,6 @@ from lmcloud.const import FirmwareType, MachineModel, SteamLevel from lmcloud.lm_machine import LaMarzoccoMachine from lmcloud.models import LaMarzoccoDeviceInfo import pytest -from typing_extensions import Generator from homeassistant.components.lamarzocco.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_MODEL, CONF_NAME, CONF_TOKEN @@ -129,14 +128,6 @@ def mock_lamarzocco(device_fixture: MachineModel) -> Generator[MagicMock]: lamarzocco.firmware[FirmwareType.GATEWAY].latest_version = "v3.5-rc3" lamarzocco.firmware[FirmwareType.MACHINE].latest_version = "1.55" - async def websocket_connect_mock( - notify_callback: Callable | None, - ) -> None: - """Mock the websocket connect method.""" - return None - - lamarzocco.websocket_connect = websocket_connect_mock - yield lamarzocco diff --git a/tests/components/lamarzocco/snapshots/test_switch.ambr b/tests/components/lamarzocco/snapshots/test_switch.ambr index edda4ffee3b..4ec22e3123d 100644 --- a/tests/components/lamarzocco/snapshots/test_switch.ambr +++ b/tests/components/lamarzocco/snapshots/test_switch.ambr @@ -113,6 +113,7 @@ }), 'manufacturer': 'La Marzocco', 'model': , + 'model_id': None, 'name': 'GS01234', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/lamarzocco/snapshots/test_update.ambr b/tests/components/lamarzocco/snapshots/test_update.ambr index 4ab8e35ffd0..f08b9249f50 100644 --- a/tests/components/lamarzocco/snapshots/test_update.ambr +++ b/tests/components/lamarzocco/snapshots/test_update.ambr @@ -10,7 +10,7 @@ 'installed_version': 'v3.1-rc4', 'latest_version': 'v3.5-rc3', 'release_summary': None, - 'release_url': None, + 'release_url': 'https://support-iot.lamarzocco.com/firmware-updates/', 'skipped_version': None, 'supported_features': , 'title': None, @@ -67,7 +67,7 @@ 'installed_version': '1.40', 'latest_version': '1.55', 'release_summary': None, - 'release_url': None, + 'release_url': 'https://support-iot.lamarzocco.com/firmware-updates/', 'skipped_version': None, 'supported_features': , 'title': None, diff --git a/tests/components/lametric/conftest.py b/tests/components/lametric/conftest.py index dd3885b78d9..e8ba727f3db 100644 --- a/tests/components/lametric/conftest.py +++ b/tests/components/lametric/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import 
AsyncMock, MagicMock, patch from demetriek import CloudDevice, Device from pydantic import parse_raw_as # pylint: disable=no-name-in-module import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/lametric/test_button.py b/tests/components/lametric/test_button.py index a6cdca5b426..04efeaac87f 100644 --- a/tests/components/lametric/test_button.py +++ b/tests/components/lametric/test_button.py @@ -49,7 +49,7 @@ async def test_button_app_next( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -95,7 +95,7 @@ async def test_button_app_previous( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -143,7 +143,7 @@ async def test_button_dismiss_current_notification( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." - assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None @@ -191,7 +191,7 @@ async def test_button_dismiss_all_notifications( assert device_entry.entry_type is None assert device_entry.identifiers == {(DOMAIN, "SA110405124500W00BS9")} assert device_entry.manufacturer == "LaMetric Inc." 
- assert device_entry.model == "LM 37X8" + assert device_entry.model_id == "LM 37X8" assert device_entry.name == "Frenck's LaMetric" assert device_entry.sw_version == "2.2.2" assert device_entry.hw_version is None diff --git a/tests/components/landisgyr_heat_meter/conftest.py b/tests/components/landisgyr_heat_meter/conftest.py index 22f29b3a4b1..1dad983c909 100644 --- a/tests/components/landisgyr_heat_meter/conftest.py +++ b/tests/components/landisgyr_heat_meter/conftest.py @@ -1,9 +1,9 @@ """Define fixtures for Landis + Gyr Heat Meter tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lastfm/__init__.py b/tests/components/lastfm/__init__.py index 9fe946f8dff..e4eb476f62d 100644 --- a/tests/components/lastfm/__init__.py +++ b/tests/components/lastfm/__init__.py @@ -1,5 +1,6 @@ """The tests for lastfm.""" +from typing import Any from unittest.mock import patch from pylast import PyLastError, Track @@ -91,7 +92,7 @@ class MockUser: """Get mock now playing.""" return self._now_playing_result - def get_friends(self) -> list[any]: + def get_friends(self) -> list[Any]: """Get mock friends.""" if len(self._friends) == 0: raise PyLastError("network", "status", "Page not found") diff --git a/tests/components/laundrify/conftest.py b/tests/components/laundrify/conftest.py index 91aeebf81ee..2f6496c06a5 100644 --- a/tests/components/laundrify/conftest.py +++ b/tests/components/laundrify/conftest.py @@ -3,6 +3,7 @@ import json from unittest.mock import patch +from laundrify_aio import LaundrifyAPI, LaundrifyDevice import pytest from .const import VALID_ACCESS_TOKEN, VALID_ACCOUNT_ID @@ -49,7 +50,10 @@ def laundrify_api_fixture(laundrify_exchange_code, laundrify_validate_token): ), patch( "laundrify_aio.LaundrifyAPI.get_machines", - return_value=json.loads(load_fixture("laundrify/machines.json")), + return_value=[ + LaundrifyDevice(machine, LaundrifyAPI) + for machine in json.loads(load_fixture("laundrify/machines.json")) + ], ) as get_machines_mock, ): yield get_machines_mock diff --git a/tests/components/laundrify/fixtures/machines.json b/tests/components/laundrify/fixtures/machines.json index ab1a737cb45..3397212659f 100644 --- a/tests/components/laundrify/fixtures/machines.json +++ b/tests/components/laundrify/fixtures/machines.json @@ -1,8 +1,10 @@ [ { - "_id": "14", + "id": "14", "name": "Demo Waschmaschine", "status": "OFF", + "internalIP": "192.168.0.123", + "model": "SU02", "firmwareVersion": "2.1.0" } ] diff --git a/tests/components/lawn_mower/test_init.py b/tests/components/lawn_mower/test_init.py index e7066ed43c1..16f32da7e04 100644 --- a/tests/components/lawn_mower/test_init.py +++ b/tests/components/lawn_mower/test_init.py @@ -1,9 +1,9 @@ """The tests for the lawn mower integration.""" +from collections.abc import Generator from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.lawn_mower import ( DOMAIN as LAWN_MOWER_DOMAIN, diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index f24fdbc054f..2884bc833c2 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -12,11 +12,10 @@ import pytest from homeassistant.components.lcn.const import DOMAIN from homeassistant.components.lcn.helpers import generate_unique_id from homeassistant.const import CONF_HOST -from homeassistant.core import HomeAssistant, ServiceCall from 
homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, async_mock_service, load_fixture +from tests.common import MockConfigEntry, load_fixture class MockModuleConnection(ModuleConnection): @@ -78,12 +77,6 @@ def create_config_entry(name): ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(name="entry") def create_config_entry_pchk(): """Return one specific config entry.""" diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 67bd7568254..6c5ab7d6f4e 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -72,7 +72,7 @@ async def test_get_triggers_non_module_device( async def test_if_fires_on_transponder_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for transponder event triggers firing.""" address = (0, 7, False) @@ -111,15 +111,15 @@ async def test_if_fires_on_transponder_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_transponder", "code": "aabbcc", } async def test_if_fires_on_fingerprint_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for fingerprint event triggers firing.""" address = (0, 7, False) @@ -158,15 +158,15 @@ async def test_if_fires_on_fingerprint_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_fingerprint", "code": "aabbcc", } async def test_if_fires_on_codelock_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for codelock event triggers firing.""" address = (0, 7, False) @@ -205,15 +205,15 @@ async def test_if_fires_on_codelock_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_codelock", "code": "aabbcc", } async def test_if_fires_on_transmitter_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for transmitter event triggers firing.""" address = (0, 7, False) @@ -258,8 +258,8 @@ async def test_if_fires_on_transmitter_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_transmitter", "code": "aabbcc", "level": 0, @@ -269,7 +269,7 @@ async def test_if_fires_on_transmitter_event( async def test_if_fires_on_send_keys_event( - hass: HomeAssistant, calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, 
service_calls: list[ServiceCall], entry, lcn_connection ) -> None: """Test for send_keys event triggers firing.""" address = (0, 7, False) @@ -309,8 +309,8 @@ async def test_if_fires_on_send_keys_event( await lcn_connection.async_process_input(inp) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == { + assert len(service_calls) == 1 + assert service_calls[0].data == { "test": "test_trigger_send_keys", "key": "a1", "action": "hit", diff --git a/tests/components/lg_netcast/conftest.py b/tests/components/lg_netcast/conftest.py deleted file mode 100644 index eb13d5c8c67..00000000000 --- a/tests/components/lg_netcast/conftest.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Common fixtures and objects for the LG Netcast integration tests.""" - -import pytest - -from homeassistant.core import HomeAssistant, ServiceCall - -from tests.common import async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/lg_netcast/test_device_trigger.py b/tests/components/lg_netcast/test_device_trigger.py index 05911acc41d..c8d725afde1 100644 --- a/tests/components/lg_netcast/test_device_trigger.py +++ b/tests/components/lg_netcast/test_device_trigger.py @@ -43,7 +43,9 @@ async def test_get_triggers( async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for turn_on triggers firing.""" await setup_lgnetcast(hass) @@ -96,11 +98,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert calls[1].data["some"] == ENTITY_ID - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == ENTITY_ID + assert service_calls[2].data["id"] == 0 async def test_failure_scenarios( diff --git a/tests/components/lg_netcast/test_trigger.py b/tests/components/lg_netcast/test_trigger.py index b0c2a86ec21..d838b931560 100644 --- a/tests/components/lg_netcast/test_trigger.py +++ b/tests/components/lg_netcast/test_trigger.py @@ -18,7 +18,9 @@ from tests.common import MockEntity, MockEntityPlatform async def test_lg_netcast_turn_on_trigger_device_id( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for turn_on trigger by device_id firing.""" await setup_lgnetcast(hass) @@ -56,14 +58,14 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -74,11 +76,11 @@ async def test_lg_netcast_turn_on_trigger_device_id( ) await 
hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_lg_netcast_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for turn_on triggers by entity firing.""" await setup_lgnetcast(hass) @@ -113,9 +115,9 @@ async def test_lg_netcast_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == ENTITY_ID - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == ENTITY_ID + assert service_calls[1].data["id"] == 0 async def test_wrong_trigger_platform_type( diff --git a/tests/components/lidarr/conftest.py b/tests/components/lidarr/conftest.py index 588acb2b87f..1024aadc403 100644 --- a/tests/components/lidarr/conftest.py +++ b/tests/components/lidarr/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from http import HTTPStatus from aiohttp.client_exceptions import ClientError from aiopyarr.lidarr_client import LidarrClient import pytest -from typing_extensions import Generator from homeassistant.components.lidarr.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 505d212a352..4834e486ec0 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -172,6 +172,19 @@ def _mocked_tile() -> Light: bulb.effect = {"effect": "OFF"} bulb.get_tile_effect = MockLifxCommand(bulb) bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) + return bulb + + +def _mocked_ceiling() -> Light: + bulb = _mocked_bulb() + bulb.product = 176 # LIFX Ceiling + bulb.effect = {"effect": "OFF"} + bulb.get_tile_effect = MockLifxCommand(bulb) + bulb.set_tile_effect = MockLifxCommand(bulb) + bulb.get64 = MockLifxCommand(bulb) + bulb.get_device_chain = MockLifxCommand(bulb) return bulb diff --git a/tests/components/lifx/conftest.py b/tests/components/lifx/conftest.py index 093f2309e53..5cb7c702f43 100644 --- a/tests/components/lifx/conftest.py +++ b/tests/components/lifx/conftest.py @@ -8,8 +8,6 @@ from homeassistant.components.lifx import config_flow, coordinator, util from . 
import _patch_discovery -from tests.common import mock_device_registry, mock_registry - @pytest.fixture def mock_discovery(): @@ -61,15 +59,3 @@ def lifx_mock_async_get_ipv4_broadcast_addresses(): return_value=["255.255.255.255"], ): yield - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 56630053cc0..9972bc1021a 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -11,15 +11,19 @@ from homeassistant.components.lifx import DOMAIN from homeassistant.components.lifx.const import ATTR_POWER from homeassistant.components.lifx.light import ATTR_INFRARED, ATTR_ZONES from homeassistant.components.lifx.manager import ( + ATTR_CLOUD_SATURATION_MAX, + ATTR_CLOUD_SATURATION_MIN, ATTR_DIRECTION, ATTR_PALETTE, ATTR_SATURATION_MAX, ATTR_SATURATION_MIN, + ATTR_SKY_TYPE, ATTR_SPEED, ATTR_THEME, SERVICE_EFFECT_COLORLOOP, SERVICE_EFFECT_MORPH, SERVICE_EFFECT_MOVE, + SERVICE_EFFECT_SKY, ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -62,6 +66,7 @@ from . import ( _mocked_brightness_bulb, _mocked_bulb, _mocked_bulb_new_firmware, + _mocked_ceiling, _mocked_clean_bulb, _mocked_light_strip, _mocked_tile, @@ -691,6 +696,7 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: entity_id = "light.my_bulb" + # FLAME effect test await hass.services.async_call( LIGHT_DOMAIN, "turn_on", @@ -707,11 +713,15 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: "effect": 3, "speed": 3, "palette": [], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() bulb.set_power.reset_mock() + # MORPH effect tests bulb.power_level = 0 await hass.services.async_call( DOMAIN, @@ -750,6 +760,9 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (8920, 65535, 32768, 3500), (10558, 65535, 32768, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() @@ -808,6 +821,140 @@ async def test_matrix_flame_morph_effects(hass: HomeAssistant) -> None: (43690, 65535, 65535, 3500), (54613, 65535, 65535, 3500), ], + "sky_type": None, + "cloud_saturation_min": None, + "cloud_saturation_max": None, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + +@pytest.mark.usefixtures("mock_discovery") +async def test_sky_effect(hass: HomeAssistant) -> None: + """Test the firmware sky effect on a ceiling device.""" + config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=SERIAL + ) + config_entry.add_to_hass(hass) + bulb = _mocked_ceiling() + bulb.power_level = 0 + bulb.color = [65535, 65535, 65535, 65535] + with ( + _patch_discovery(device=bulb), + _patch_config_flow_try_connect(device=bulb), + _patch_device(device=bulb), + ): + await async_setup_component(hass, lifx.DOMAIN, {lifx.DOMAIN: {}}) + await hass.async_block_till_done() + + entity_id = "light.my_bulb" + + # SKY effect test + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + 
ATTR_PALETTE: [], + ATTR_SKY_TYPE: "Clouds", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 65535 + bulb.effect = { + "effect": "SKY", + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [], + "sky_type": 2, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + bulb.get_tile_effect.reset_mock() + bulb.set_tile_effect.reset_mock() + bulb.set_power.reset_mock() + + bulb.power_level = 0 + await hass.services.async_call( + DOMAIN, + SERVICE_EFFECT_SKY, + { + ATTR_ENTITY_ID: entity_id, + ATTR_PALETTE: [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + ATTR_SKY_TYPE: "Sunrise", + ATTR_CLOUD_SATURATION_MAX: 180, + ATTR_CLOUD_SATURATION_MIN: 50, + }, + blocking=True, + ) + + bulb.power_level = 65535 + bulb.effect = { + "effect": "SKY", + "palette": [ + (200, 100, 1, 3500), + (241, 100, 1, 3500), + (189, 100, 8, 3500), + (40, 100, 100, 3500), + (40, 50, 100, 3500), + (0, 0, 100, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, + } + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done(wait_background_tasks=True) + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + assert len(bulb.set_power.calls) == 1 + assert len(bulb.set_tile_effect.calls) == 1 + call_dict = bulb.set_tile_effect.calls[0][1] + call_dict.pop("callb") + assert call_dict == { + "effect": 5, + "speed": 50, + "palette": [ + (36408, 65535, 65535, 3500), + (43872, 65535, 65535, 3500), + (34406, 65535, 5243, 3500), + (7281, 65535, 65535, 3500), + (7281, 32768, 65535, 3500), + (0, 0, 65535, 6500), + ], + "sky_type": 0, + "cloud_saturation_min": 50, + "cloud_saturation_max": 180, } bulb.get_tile_effect.reset_mock() bulb.set_tile_effect.reset_mock() diff --git a/tests/components/lifx/test_migration.py b/tests/components/lifx/test_migration.py index 0604ee1c8a7..e5b2f9f8167 100644 --- a/tests/components/lifx/test_migration.py +++ b/tests/components/lifx/test_migration.py @@ -11,8 +11,6 @@ from homeassistant.components.lifx import DOMAIN, discovery from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STARTED from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.helpers.device_registry import DeviceRegistry -from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -31,20 +29,22 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_migration_device_online_end_to_end( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, 
title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -67,7 +67,7 @@ async def test_migration_device_online_end_to_end( assert device.config_entries == {migrated_entry.entry_id} assert light_entity_reg.config_entry_id == migrated_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] + assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) await hass.async_block_till_done() @@ -84,20 +84,22 @@ async def test_migration_device_online_end_to_end( async def test_discovery_is_more_frequent_during_migration( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test that discovery is more frequent during migration.""" config_entry = MockConfigEntry( domain=DOMAIN, title="LEGACY", data={}, unique_id=DOMAIN ) config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - entity_reg.async_get_or_create( + entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -160,7 +162,9 @@ async def test_discovery_is_more_frequent_during_migration( async def test_migration_device_online_end_to_end_after_downgrade( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry can happen again after a downgrade.""" config_entry = MockConfigEntry( @@ -172,13 +176,13 @@ async def test_migration_device_online_end_to_end_after_downgrade( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=SERIAL ) already_migrated_config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - light_entity_reg = entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -197,7 +201,7 @@ async def test_migration_device_online_end_to_end_after_downgrade( assert device.config_entries == {config_entry.entry_id} assert light_entity_reg.config_entry_id == config_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, config_entry) == [] + assert er.async_entries_for_config_entry(entity_registry, config_entry) == [] legacy_entry = None for entry in hass.config_entries.async_entries(DOMAIN): @@ -209,7 +213,9 @@ async def test_migration_device_online_end_to_end_after_downgrade( async def test_migration_device_online_end_to_end_ignores_other_devices( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: 
dr.DeviceRegistry, + entity_registry: er.EntityRegistry, ) -> None: """Test migration from single config entry.""" legacy_config_entry = MockConfigEntry( @@ -221,18 +227,18 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( domain="other_domain", data={}, unique_id="other_domain" ) other_domain_config_entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=legacy_config_entry.entry_id, identifiers={(DOMAIN, SERIAL)}, connections={(dr.CONNECTION_NETWORK_MAC, MAC_ADDRESS)}, name=LABEL, ) - other_device = device_reg.async_get_or_create( + other_device = device_registry.async_get_or_create( config_entry_id=other_domain_config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "556655665566")}, name=LABEL, ) - light_entity_reg = entity_reg.async_get_or_create( + light_entity_reg = entity_registry.async_get_or_create( config_entry=legacy_config_entry, platform=DOMAIN, domain="light", @@ -240,7 +246,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - ignored_entity_reg = entity_reg.async_get_or_create( + ignored_entity_reg = entity_registry.async_get_or_create( config_entry=other_domain_config_entry, platform=DOMAIN, domain="sensor", @@ -248,7 +254,7 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( original_name=LABEL, device_id=device.id, ) - garbage_entity_reg = entity_reg.async_get_or_create( + garbage_entity_reg = entity_registry.async_get_or_create( config_entry=legacy_config_entry, platform=DOMAIN, domain="sensor", @@ -281,5 +287,11 @@ async def test_migration_device_online_end_to_end_ignores_other_devices( assert ignored_entity_reg.config_entry_id == other_domain_config_entry.entry_id assert garbage_entity_reg.config_entry_id == legacy_config_entry.entry_id - assert er.async_entries_for_config_entry(entity_reg, legacy_config_entry) == [] - assert dr.async_entries_for_config_entry(device_reg, legacy_config_entry) == [] + assert ( + er.async_entries_for_config_entry(entity_registry, legacy_config_entry) + == [] + ) + assert ( + dr.async_entries_for_config_entry(device_registry, legacy_config_entry) + == [] + ) diff --git a/tests/components/light/test_device_action.py b/tests/components/light/test_device_action.py index 8848ce19621..c2ac7087cf0 100644 --- a/tests/components/light/test_device_action.py +++ b/tests/components/light/test_device_action.py @@ -14,7 +14,7 @@ from homeassistant.components.light import ( LightEntityFeature, ) from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -32,12 +32,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -471,7 +465,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off 
actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -636,7 +629,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/light/test_device_condition.py b/tests/components/light/test_device_condition.py index 11dea49ea60..94e12ffbfa5 100644 --- a/tests/components/light/test_device_condition.py +++ b/tests/components/light/test_device_condition.py @@ -22,7 +22,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -32,12 +31,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -186,7 +179,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -252,20 +245,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -273,7 +266,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -318,20 +311,20 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_light_entities: list[MockLight], ) -> None: """Test for firing if condition is on with delay.""" @@ -385,26 +378,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert 
len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/light/test_device_trigger.py b/tests/components/light/test_device_trigger.py index ab3babd1b64..4e8414edabc 100644 --- a/tests/components/light/test_device_trigger.py +++ b/tests/components/light/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) DATA_TEMPLATE_ATTRIBUTES = ( @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -189,7 +182,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -258,20 +251,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -282,7 +275,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -321,13 +314,14 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 
hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"turn_on device - {entry.entity_id} - on - off - None" + service_calls[0].data["some"] + == f"turn_on device - {entry.entity_id} - on - off - None" ) @@ -336,7 +330,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -376,16 +370,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/light/test_recorder.py b/tests/components/light/test_recorder.py index 49c9a567856..f3f87ff6074 100644 --- a/tests/components/light/test_recorder.py +++ b/tests/components/light/test_recorder.py @@ -9,12 +9,23 @@ import pytest from homeassistant.components import light from homeassistant.components.light import ( + ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP, + ATTR_COLOR_TEMP_KELVIN, + ATTR_EFFECT, ATTR_EFFECT_LIST, + ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MAX_MIREDS, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_MIN_MIREDS, + ATTR_RGB_COLOR, + ATTR_RGBW_COLOR, + ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, + ATTR_XY_COLOR, + DOMAIN, ) from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -50,7 +61,7 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) await async_wait_recording_done(hass) states = await hass.async_add_executor_job( - get_significant_states, hass, now, None, hass.states.async_entity_ids() + get_significant_states, hass, now, None, hass.states.async_entity_ids(DOMAIN) ) assert len(states) >= 1 for entity_states in states.values(): @@ -62,3 +73,13 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant) assert ATTR_FRIENDLY_NAME in state.attributes assert ATTR_MAX_COLOR_TEMP_KELVIN not in state.attributes assert ATTR_MIN_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_BRIGHTNESS not in state.attributes + assert ATTR_COLOR_MODE not in state.attributes + assert ATTR_COLOR_TEMP not in state.attributes + assert ATTR_COLOR_TEMP_KELVIN not in state.attributes + assert ATTR_EFFECT not in state.attributes + assert ATTR_HS_COLOR not in state.attributes + assert ATTR_RGB_COLOR not in state.attributes + assert ATTR_RGBW_COLOR not in state.attributes + assert ATTR_RGBWW_COLOR not in state.attributes + assert ATTR_XY_COLOR not in state.attributes diff --git a/tests/components/linear_garage_door/conftest.py b/tests/components/linear_garage_door/conftest.py index 306da23ebf9..4ed7662e5d0 100644 --- a/tests/components/linear_garage_door/conftest.py +++ b/tests/components/linear_garage_door/conftest.py @@ -1,9 +1,9 @@ """Common 
fixtures for the Linear Garage Door tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.linear_garage_door import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/linear_garage_door/test_diagnostics.py b/tests/components/linear_garage_door/test_diagnostics.py index 6bf7415bde5..a00feed43ff 100644 --- a/tests/components/linear_garage_door/test_diagnostics.py +++ b/tests/components/linear_garage_door/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -25,4 +26,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/linkplay/__init__.py b/tests/components/linkplay/__init__.py new file mode 100644 index 00000000000..5962f7fdaba --- /dev/null +++ b/tests/components/linkplay/__init__.py @@ -0,0 +1 @@ +"""Tests for the LinkPlay integration.""" diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py new file mode 100644 index 00000000000..b3d65422e08 --- /dev/null +++ b/tests/components/linkplay/conftest.py @@ -0,0 +1,40 @@ +"""Test configuration and mocks for LinkPlay component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from linkplay.bridge import LinkPlayBridge, LinkPlayDevice +import pytest + +HOST = "10.0.0.150" +HOST_REENTRY = "10.0.0.66" +UUID = "FF31F09E-5001-FBDE-0546-2DBFFF31F09E" +NAME = "Smart Zone 1_54B9" + + +@pytest.fixture +def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: + """Mock for linkplay_factory_bridge.""" + + with ( + patch( + "homeassistant.components.linkplay.config_flow.linkplay_factory_bridge" + ) as factory, + ): + bridge = AsyncMock(spec=LinkPlayBridge) + bridge.endpoint = HOST + bridge.device = AsyncMock(spec=LinkPlayDevice) + bridge.device.uuid = UUID + bridge.device.name = NAME + factory.return_value = bridge + yield factory + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.linkplay.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py new file mode 100644 index 00000000000..641f09893c2 --- /dev/null +++ b/tests/components/linkplay/test_config_flow.py @@ -0,0 +1,204 @@ +"""Tests for the LinkPlay config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock + +from homeassistant.components.linkplay.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import HOST, HOST_REENTRY, NAME, UUID + +from tests.common import MockConfigEntry + +ZEROCONF_DISCOVERY = ZeroconfServiceInfo( + ip_address=ip_address(HOST), + ip_addresses=[ip_address(HOST)], + hostname=f"{NAME}.local.", + name=f"{NAME}._linkplay._tcp.local.", + port=59152, + 
type="_linkplay._tcp.local.", + properties={ + "uuid": f"uuid:{UUID}", + "mac": "00:2F:69:01:84:3A", + "security": "https 2.0", + "upnp": "1.0.0", + "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", + }, +) + +ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( + ip_address=ip_address(HOST_REENTRY), + ip_addresses=[ip_address(HOST_REENTRY)], + hostname=f"{NAME}.local.", + name=f"{NAME}._linkplay._tcp.local.", + port=59152, + type="_linkplay._tcp.local.", + properties={ + "uuid": f"uuid:{UUID}", + "mac": "00:2F:69:01:84:3A", + "security": "https 2.0", + "upnp": "1.0.0", + "bootid": "1f347886-1dd2-11b2-86ab-aa0cd2803583", + }, +) + + +async def test_user_flow( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user setup config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID + + +async def test_user_flow_re_entry( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user setup config flow when an entry with the same unique id already exists.""" + + # Create mock entry which already has the same UUID + entry = MockConfigEntry( + data={CONF_HOST: HOST}, + domain=DOMAIN, + title=NAME, + unique_id=UUID, + ) + entry.add_to_hass(hass) + + # Re-create entry with different host + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST_REENTRY}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_zeroconf_flow( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test Zeroconf flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "discovery_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID + + +async def test_zeroconf_flow_re_entry( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test Zeroconf flow when an entry with the same unique id already exists.""" + + # Create mock entry which already has the same UUID + entry = MockConfigEntry( + data={CONF_HOST: HOST}, + domain=DOMAIN, + title=NAME, + unique_id=UUID, + ) + entry.add_to_hass(hass) + + # Re-create entry with different host + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY_RE_ENTRY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_flow_errors( + hass: HomeAssistant, + 
mock_linkplay_factory_bridge: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test flow when the device cannot be reached.""" + + # Temporarily store bridge in a separate variable and set factory to return None + bridge = mock_linkplay_factory_bridge.return_value + mock_linkplay_factory_bridge.return_value = None + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "cannot_connect"} + + # Make linkplay_factory_bridge return a mock bridge again + mock_linkplay_factory_bridge.return_value = bridge + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == NAME + assert result["data"] == { + CONF_HOST: HOST, + } + assert result["result"].unique_id == UUID diff --git a/tests/components/litejet/test_trigger.py b/tests/components/litejet/test_trigger.py index 216084c26bc..b4374652955 100644 --- a/tests/components/litejet/test_trigger.py +++ b/tests/components/litejet/test_trigger.py @@ -14,7 +14,7 @@ import homeassistant.util.dt as dt_util from . import async_init_integration -from tests.common import async_fire_time_changed_exact, async_mock_service +from tests.common import async_fire_time_changed_exact @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -30,12 +30,6 @@ ENTITY_OTHER_SWITCH = "switch.mock_switch_2" ENTITY_OTHER_SWITCH_NUMBER = 2 -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def simulate_press(hass, mock_litejet, number): """Test to simulate a press.""" _LOGGER.info("*** simulate press of %d", number) @@ -101,7 +95,7 @@ async def setup_automation(hass, trigger): async def test_simple( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -111,12 +105,12 @@ async def test_simple( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_only_release( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -125,11 +119,11 @@ async def test_only_release( await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_more_than_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a too short hold.""" await setup_automation( @@ -144,11 +138,11 @@ async def test_held_more_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, 
timedelta(seconds=1)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_more_than_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is long enough.""" await setup_automation( @@ -161,16 +155,16 @@ async def test_held_more_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_held_less_than_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is short enough.""" await setup_automation( @@ -184,14 +178,14 @@ async def test_held_less_than_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=1)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_held_less_than_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test a hold that is too long.""" await setup_automation( @@ -204,15 +198,15 @@ async def test_held_less_than_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=3)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_in_range_short( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too short hold.""" await setup_automation( @@ -228,11 +222,11 @@ async def test_held_in_range_short( await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) await simulate_time(hass, mock_litejet, timedelta(seconds=0.5)) await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_held_in_range_just_right( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a just right hold.""" await setup_automation( @@ -246,16 +240,16 @@ async def test_held_in_range_just_right( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=2)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 1 - assert calls[0].data["id"] 
== 0 + assert len(service_calls) == 1 + assert service_calls[0].data["id"] == 0 async def test_held_in_range_long( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test an in-range trigger with a too long hold.""" await setup_automation( @@ -269,15 +263,15 @@ async def test_held_in_range_long( ) await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_time(hass, mock_litejet, timedelta(seconds=4)) - assert len(calls) == 0 + assert len(service_calls) == 0 await simulate_release(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_reload( - hass: HomeAssistant, calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet ) -> None: """Test reloading automation.""" await setup_automation( @@ -312,8 +306,8 @@ async def test_reload( await hass.async_block_till_done() await simulate_press(hass, mock_litejet, ENTITY_OTHER_SWITCH_NUMBER) - assert len(calls) == 0 + assert len(service_calls) == 1 await simulate_time(hass, mock_litejet, timedelta(seconds=5)) - assert len(calls) == 0 + assert len(service_calls) == 1 await simulate_time(hass, mock_litejet, timedelta(seconds=12.5)) - assert len(calls) == 1 + assert len(service_calls) == 2 diff --git a/tests/components/local_calendar/conftest.py b/tests/components/local_calendar/conftest.py index 6d2c38544a5..8aef73a9d5a 100644 --- a/tests/components/local_calendar/conftest.py +++ b/tests/components/local_calendar/conftest.py @@ -1,6 +1,6 @@ """Fixtures for local calendar.""" -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from http import HTTPStatus from pathlib import Path from typing import Any @@ -9,7 +9,6 @@ import urllib from aiohttp import ClientWebSocketResponse import pytest -from typing_extensions import Generator from homeassistant.components.local_calendar import LocalCalendarStore from homeassistant.components.local_calendar.const import CONF_CALENDAR_NAME, DOMAIN diff --git a/tests/components/local_calendar/test_diagnostics.py b/tests/components/local_calendar/test_diagnostics.py index ed12391f8a9..30c857dad98 100644 --- a/tests/components/local_calendar/test_diagnostics.py +++ b/tests/components/local_calendar/test_diagnostics.py @@ -7,7 +7,6 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.auth.models import Credentials from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .conftest import TEST_ENTITY, Client @@ -41,12 +40,6 @@ def _get_test_client_generator( return auth_client -@pytest.fixture(autouse=True) -async def setup_diag(hass): - """Set up diagnostics platform.""" - assert await async_setup_component(hass, "diagnostics", {}) - - @freeze_time("2023-03-13 12:05:00-07:00") @pytest.mark.usefixtures("socket_enabled") async def test_empty_calendar( diff --git a/tests/components/local_todo/conftest.py b/tests/components/local_todo/conftest.py index 67ef76172b7..ab73dabb474 100644 --- a/tests/components/local_todo/conftest.py +++ b/tests/components/local_todo/conftest.py @@ -1,11 +1,11 @@ """Common fixtures for the local_todo tests.""" +from collections.abc import Generator from pathlib import Path from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import 
Generator from homeassistant.components.local_todo import LocalTodoListStore from homeassistant.components.local_todo.const import ( diff --git a/tests/components/local_todo/test_todo.py b/tests/components/local_todo/test_todo.py index e54ee925437..253adebd757 100644 --- a/tests/components/local_todo/test_todo.py +++ b/tests/components/local_todo/test_todo.py @@ -7,7 +7,17 @@ from typing import Any import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from .conftest import TEST_ENTITY @@ -76,17 +86,17 @@ EXPECTED_ADD_ITEM = { ("item_data", "expected_item_data"), [ ({}, EXPECTED_ADD_ITEM), - ({"due_date": "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17"}), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, {**EXPECTED_ADD_ITEM, "due": "2023-11-17T05:30:00-06:00"}, ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_ADD_ITEM, "description": "Additional detail"}, ), - ({"description": ""}, {**EXPECTED_ADD_ITEM, "description": ""}), - ({"description": None}, EXPECTED_ADD_ITEM), + ({ATTR_DESCRIPTION: ""}, {**EXPECTED_ADD_ITEM, "description": ""}), + ({ATTR_DESCRIPTION: None}, EXPECTED_ADD_ITEM), ], ) async def test_add_item( @@ -105,9 +115,9 @@ async def test_add_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -127,12 +137,12 @@ async def test_add_item( ("item_data", "expected_item_data"), [ ({}, {}), - ({"due_date": "2023-11-17"}, {"due": "2023-11-17"}), + ({ATTR_DUE_DATE: "2023-11-17"}, {"due": "2023-11-17"}), ( {"due_datetime": "2023-11-17T11:30:00+00:00"}, {"due": "2023-11-17T05:30:00-06:00"}, ), - ({"description": "Additional detail"}, {"description": "Additional detail"}), + ({ATTR_DESCRIPTION: "Additional detail"}, {"description": "Additional detail"}), ], ) async def test_remove_item( @@ -145,9 +155,9 @@ async def test_remove_item( """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "replace batteries", **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "replace batteries", **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -165,9 +175,9 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": [items[0]["uid"]]}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: [items[0]["uid"]]}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -188,9 +198,9 @@ async def test_bulk_remove( for i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"soda #{i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"soda #{i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -204,9 +214,9 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", 
- {"item": uids}, - target={"entity_id": TEST_ENTITY}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: uids}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -227,19 +237,23 @@ EXPECTED_UPDATE_ITEM = { @pytest.mark.parametrize( ("item_data", "expected_item_data", "expected_state"), [ - ({"status": "completed"}, {**EXPECTED_UPDATE_ITEM, "status": "completed"}, "0"), ( - {"due_date": "2023-11-17"}, + {ATTR_STATUS: "completed"}, + {**EXPECTED_UPDATE_ITEM, "status": "completed"}, + "0", + ), + ( + {ATTR_DUE_DATE: "2023-11-17"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17"}, "1", ), ( - {"due_datetime": "2023-11-17T11:30:00+00:00"}, + {ATTR_DUE_DATETIME: "2023-11-17T11:30:00+00:00"}, {**EXPECTED_UPDATE_ITEM, "due": "2023-11-17T05:30:00-06:00"}, "1", ), ( - {"description": "Additional detail"}, + {ATTR_DESCRIPTION: "Additional detail"}, {**EXPECTED_UPDATE_ITEM, "description": "Additional detail"}, "1", ), @@ -258,9 +272,9 @@ async def test_update_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -279,9 +293,9 @@ async def test_update_item( # Update item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -303,7 +317,7 @@ async def test_update_item( ("item_data", "expected_item_data"), [ ( - {"status": "completed"}, + {ATTR_STATUS: "completed"}, { "summary": "soda", "status": "completed", @@ -312,7 +326,7 @@ async def test_update_item( }, ), ( - {"due_date": "2024-01-02"}, + {ATTR_DUE_DATE: "2024-01-02"}, { "summary": "soda", "status": "needs_action", @@ -321,7 +335,7 @@ async def test_update_item( }, ), ( - {"due_date": None}, + {ATTR_DUE_DATE: None}, { "summary": "soda", "status": "needs_action", @@ -329,7 +343,7 @@ async def test_update_item( }, ), ( - {"due_datetime": "2024-01-01 10:30:00"}, + {ATTR_DUE_DATETIME: "2024-01-01 10:30:00"}, { "summary": "soda", "status": "needs_action", @@ -338,7 +352,7 @@ async def test_update_item( }, ), ( - {"due_datetime": None}, + {ATTR_DUE_DATETIME: None}, { "summary": "soda", "status": "needs_action", @@ -346,7 +360,7 @@ async def test_update_item( }, ), ( - {"description": "updated description"}, + {ATTR_DESCRIPTION: "updated description"}, { "summary": "soda", "status": "needs_action", @@ -355,7 +369,7 @@ async def test_update_item( }, ), ( - {"description": None}, + {ATTR_DESCRIPTION: None}, {"summary": "soda", "status": "needs_action", "due": "2024-01-01"}, ), ], @@ -381,9 +395,13 @@ async def test_update_existing_field( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda", "description": "Additional detail", "due_date": "2024-01-01"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + { + ATTR_ITEM: "soda", + ATTR_DESCRIPTION: "Additional detail", + ATTR_DUE_DATE: "2024-01-01", + }, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -398,9 +416,9 @@ async def test_update_existing_field( # Perform update await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], **item_data}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], **item_data}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -424,9 +442,9 
@@ async def test_rename( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -444,9 +462,9 @@ async def test_rename( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": item["uid"], "rename": "water"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: item["uid"], ATTR_RENAME: "water"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -501,9 +519,9 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": f"item {i}"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: f"item {i}"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -559,9 +577,9 @@ async def test_move_item_previous_unknown( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "item 1"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "item 1"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -732,9 +750,9 @@ async def test_susbcribe( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -765,9 +783,9 @@ async def test_susbcribe( # Rename item await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": uid, "rename": "milk"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: uid, ATTR_RENAME: "milk"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/lock/conftest.py b/tests/components/lock/conftest.py index f1715687339..fd569b162bc 100644 --- a/tests/components/lock/conftest.py +++ b/tests/components/lock/conftest.py @@ -1,10 +1,10 @@ """Fixtures for the lock entity platform tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.lock import ( DOMAIN as LOCK_DOMAIN, diff --git a/tests/components/lock/test_device_condition.py b/tests/components/lock/test_device_condition.py index 97afe9fb759..74910e1909f 100644 --- a/tests/components/lock/test_device_condition.py +++ b/tests/components/lock/test_device_condition.py @@ -21,11 +21,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -33,12 +29,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -139,7 +129,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: 
er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -291,52 +281,52 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_locked - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_locked - event - test_event1" hass.states.async_set(entry.entity_id, STATE_UNLOCKED) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_unlocked - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_unlocked - event - test_event2" hass.states.async_set(entry.entity_id, STATE_UNLOCKING) hass.bus.async_fire("test_event3") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_unlocking - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_unlocking - event - test_event3" hass.states.async_set(entry.entity_id, STATE_LOCKING) hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_locking - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_locking - event - test_event4" hass.states.async_set(entry.entity_id, STATE_JAMMED) hass.bus.async_fire("test_event5") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[4].data["some"] == "is_jammed - event - test_event5" + assert len(service_calls) == 5 + assert service_calls[4].data["some"] == "is_jammed - event - test_event5" hass.states.async_set(entry.entity_id, STATE_OPENING) hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_opening - event - test_event6" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_opening - event - test_event6" hass.states.async_set(entry.entity_id, STATE_OPEN) hass.bus.async_fire("test_event7") await hass.async_block_till_done() - assert len(calls) == 7 - assert calls[6].data["some"] == "is_open - event - test_event7" + assert len(service_calls) == 7 + assert service_calls[6].data["some"] == "is_open - event - test_event7" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -380,5 +370,5 @@ async def test_if_state_legacy( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_locked - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_locked - event - test_event1" diff --git a/tests/components/lock/test_device_trigger.py b/tests/components/lock/test_device_trigger.py index 3cbfbb1a04c..f64334fa29b 100644 --- a/tests/components/lock/test_device_trigger.py +++ b/tests/components/lock/test_device_trigger.py @@ -29,7 +29,6 @@ from tests.common import ( async_fire_time_changed, 
async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -38,12 +37,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -212,7 +205,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -296,27 +289,27 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_UNLOCKED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"unlocked - device - {entry.entity_id} - locked - unlocked - None" ) # Fake that the entity is opens. hass.states.async_set(entry.entity_id, STATE_OPEN) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"open - device - {entry.entity_id} - unlocked - open - None" ) @@ -325,7 +318,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -371,9 +364,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. 
hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"locked - device - {entry.entity_id} - unlocked - locked - None" ) @@ -382,7 +375,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -516,64 +509,64 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_LOCKED) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - unlocked - locked - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_UNLOCKING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=16)) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 await hass.async_block_till_done() assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"turn_on device - {entry.entity_id} - locked - unlocking - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_JAMMED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=21)) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 await hass.async_block_till_done() assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"turn_off device - {entry.entity_id} - unlocking - jammed - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_LOCKING) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 await hass.async_block_till_done() assert ( - calls[3].data["some"] + service_calls[3].data["some"] == f"turn_on device - {entry.entity_id} - jammed - locking - 0:00:05" ) hass.states.async_set(entry.entity_id, STATE_OPENING) await hass.async_block_till_done() - assert len(calls) == 4 + assert len(service_calls) == 4 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=27)) await hass.async_block_till_done() - assert len(calls) == 5 + assert len(service_calls) == 5 await hass.async_block_till_done() assert ( - calls[4].data["some"] + service_calls[4].data["some"] == f"turn_on device - {entry.entity_id} - locking - opening - 0:00:05" ) diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index ac653737614..9b1a6bb44cc 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -3,6 +3,7 @@ import asyncio from collections.abc import Callable from datetime 
import timedelta +from typing import Any from unittest.mock import ANY, patch from freezegun import freeze_time @@ -31,9 +32,10 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, ) -from homeassistant.core import Event, HomeAssistant, State +from homeassistant.core import Event, HomeAssistant, State, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -2965,3 +2967,79 @@ async def test_subscribe_all_entities_are_continuous_with_device( assert listeners_without_writes( hass.bus.async_listeners() ) == listeners_without_writes(init_listeners) + + +@pytest.mark.parametrize("params", [{"entity_ids": ["binary_sensor.is_light"]}, {}]) +async def test_live_stream_with_changed_state_change( + async_setup_recorder_instance: RecorderInstanceGenerator, + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + params: dict[str, Any], +) -> None: + """Test the live logbook stream with chained events.""" + config = {recorder.CONF_COMMIT_INTERVAL: 0.5} + await async_setup_recorder_instance(hass, config) + now = dt_util.utcnow() + await asyncio.gather( + *[ + async_setup_component(hass, comp, {}) + for comp in ("homeassistant", "logbook") + ] + ) + + hass.states.async_set("binary_sensor.is_light", "ignored") + hass.states.async_set("binary_sensor.is_light", "init") + await async_wait_recording_done(hass) + + @callback + def auto_off_listener(event): + hass.states.async_set("binary_sensor.is_light", STATE_OFF) + + async_track_state_change_event(hass, ["binary_sensor.is_light"], auto_off_listener) + + websocket_client = await hass_ws_client() + init_listeners = hass.bus.async_listeners() + await websocket_client.send_json( + { + "id": 7, + "type": "logbook/event_stream", + "start_time": now.isoformat(), + **params, + } + ) + + msg = await asyncio.wait_for(websocket_client.receive_json(), 2) + assert msg["id"] == 7 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + + await hass.async_block_till_done() + hass.states.async_set("binary_sensor.is_light", STATE_ON) + + received_rows = [] + while len(received_rows) < 3: + msg = await asyncio.wait_for(websocket_client.receive_json(), 2.5) + assert msg["id"] == 7 + assert msg["type"] == "event" + received_rows.extend(msg["event"]["events"]) + + # Make sure we get rows back in order + assert received_rows == [ + {"entity_id": "binary_sensor.is_light", "state": "init", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "on", "when": ANY}, + {"entity_id": "binary_sensor.is_light", "state": "off", "when": ANY}, + ] + + await websocket_client.send_json( + {"id": 8, "type": "unsubscribe_events", "subscription": 7} + ) + msg = await asyncio.wait_for(websocket_client.receive_json(), 2) + + assert msg["id"] == 8 + assert msg["type"] == TYPE_RESULT + assert msg["success"] + + # Check our listener got unsubscribed + assert listeners_without_writes( + hass.bus.async_listeners() + ) == listeners_without_writes(init_listeners) diff --git a/tests/components/logi_circle/test_config_flow.py b/tests/components/logi_circle/test_config_flow.py index 2525354598d..ab4bae02ad6 100644 --- a/tests/components/logi_circle/test_config_flow.py +++ b/tests/components/logi_circle/test_config_flow.py @@ -1,8 +1,10 @@ """Tests for Logi Circle config flow.""" import asyncio +from collections.abc 
import Generator from http import HTTPStatus -from unittest.mock import AsyncMock, Mock, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -24,13 +26,13 @@ from tests.common import MockConfigEntry class MockRequest: """Mock request passed to HomeAssistantView.""" - def __init__(self, hass, query): + def __init__(self, hass: HomeAssistant, query: dict[str, Any]) -> None: """Init request object.""" self.app = {KEY_HASS: hass} self.query = query -def init_config_flow(hass): +def init_config_flow(hass: HomeAssistant) -> config_flow.LogiCircleFlowHandler: """Init a configuration flow.""" config_flow.register_flow_implementation( hass, @@ -48,7 +50,7 @@ def init_config_flow(hass): @pytest.fixture -def mock_logi_circle(): +def mock_logi_circle() -> Generator[MagicMock]: """Mock logi_circle.""" with patch( "homeassistant.components.logi_circle.config_flow.LogiCircle" @@ -63,7 +65,8 @@ def mock_logi_circle(): yield LogiCircle -async def test_step_import(hass: HomeAssistant, mock_logi_circle) -> None: +@pytest.mark.usefixtures("mock_logi_circle") +async def test_step_import(hass: HomeAssistant) -> None: """Test that we trigger import when configuring with client.""" flow = init_config_flow(hass) @@ -72,7 +75,8 @@ async def test_step_import(hass: HomeAssistant, mock_logi_circle) -> None: assert result["step_id"] == "auth" -async def test_full_flow_implementation(hass: HomeAssistant, mock_logi_circle) -> None: +@pytest.mark.usefixtures("mock_logi_circle") +async def test_full_flow_implementation(hass: HomeAssistant) -> None: """Test registering an implementation and finishing flow works.""" config_flow.register_flow_implementation( hass, @@ -154,7 +158,10 @@ async def test_abort_if_already_setup(hass: HomeAssistant) -> None: ], ) async def test_abort_if_authorize_fails( - hass: HomeAssistant, mock_logi_circle, side_effect, error + hass: HomeAssistant, + mock_logi_circle: MagicMock, + side_effect: type[Exception], + error: str, ) -> None: """Test we abort if authorizing fails.""" flow = init_config_flow(hass) @@ -177,7 +184,8 @@ async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: assert result["step_id"] == "auth" -async def test_gen_auth_url(hass: HomeAssistant, mock_logi_circle) -> None: +@pytest.mark.usefixtures("mock_logi_circle") +async def test_gen_auth_url(hass: HomeAssistant) -> None: """Test generating authorize URL from Logi Circle API.""" config_flow.register_flow_implementation( hass, @@ -206,7 +214,7 @@ async def test_callback_view_rejects_missing_code(hass: HomeAssistant) -> None: async def test_callback_view_accepts_code( - hass: HomeAssistant, mock_logi_circle + hass: HomeAssistant, mock_logi_circle: MagicMock ) -> None: """Test the auth callback view handles requests with auth code.""" init_config_flow(hass) diff --git a/tests/components/logi_circle/test_init.py b/tests/components/logi_circle/test_init.py index f8bf8306609..d953acdf744 100644 --- a/tests/components/logi_circle/test_init.py +++ b/tests/components/logi_circle/test_init.py @@ -1,7 +1,8 @@ """Tests for the Logi Circle integration.""" import asyncio -from unittest.mock import AsyncMock, Mock, patch +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -14,14 +15,14 @@ from tests.common import MockConfigEntry @pytest.fixture(name="disable_platforms") -async def disable_platforms_fixture(hass): +def disable_platforms_fixture() -> Generator[None]: """Disable logi_circle 
platforms.""" with patch("homeassistant.components.logi_circle.PLATFORMS", []): yield @pytest.fixture -def mock_logi_circle(): +def mock_logi_circle() -> Generator[MagicMock]: """Mock logi_circle.""" auth_provider_mock = Mock() @@ -37,11 +38,10 @@ def mock_logi_circle(): yield LogiCircle +@pytest.mark.usefixtures("disable_platforms", "mock_logi_circle") async def test_repair_issue( hass: HomeAssistant, issue_registry: ir.IssueRegistry, - disable_platforms, - mock_logi_circle, ) -> None: """Test the LogiCircle configuration entry loading/unloading handles the repair.""" config_entry = MockConfigEntry( diff --git a/tests/components/loqed/conftest.py b/tests/components/loqed/conftest.py index 57ef19d0fcb..ddad8949d7d 100644 --- a/tests/components/loqed/conftest.py +++ b/tests/components/loqed/conftest.py @@ -1,12 +1,12 @@ """Contains fixtures for Loqed tests.""" +from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, Mock, patch from loqedAPI import loqed import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.loqed import DOMAIN from homeassistant.components.loqed.const import CONF_CLOUDHOOK_URL diff --git a/tests/components/lovelace/test_cast.py b/tests/components/lovelace/test_cast.py index 632ea731d0c..c54b31d9297 100644 --- a/tests/components/lovelace/test_cast.py +++ b/tests/components/lovelace/test_cast.py @@ -1,10 +1,10 @@ """Test the Lovelace Cast platform.""" +from collections.abc import AsyncGenerator, Generator from time import time from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.lovelace import cast as lovelace_cast from homeassistant.components.media_player import MediaClass @@ -30,7 +30,7 @@ def mock_onboarding_done() -> Generator[MagicMock]: @pytest.fixture -async def mock_https_url(hass): +async def mock_https_url(hass: HomeAssistant) -> None: """Mock valid URL.""" await async_process_ha_core_config( hass, @@ -39,7 +39,7 @@ async def mock_https_url(hass): @pytest.fixture -async def mock_yaml_dashboard(hass): +async def mock_yaml_dashboard(hass: HomeAssistant) -> AsyncGenerator[None]: """Mock the content of a YAML dashboard.""" # Set up a YAML dashboard with 2 views. 
assert await async_setup_component( @@ -116,9 +116,8 @@ async def test_browse_media_error(hass: HomeAssistant) -> None: ) -async def test_browse_media( - hass: HomeAssistant, mock_yaml_dashboard, mock_https_url -) -> None: +@pytest.mark.usefixtures("mock_yaml_dashboard", "mock_https_url") +async def test_browse_media(hass: HomeAssistant) -> None: """Test browse media.""" top_level_items = await lovelace_cast.async_browse_media( hass, "lovelace", "", lovelace_cast.CAST_TYPE_CHROMECAST @@ -181,7 +180,8 @@ async def test_browse_media( ) -async def test_play_media(hass: HomeAssistant, mock_yaml_dashboard) -> None: +@pytest.mark.usefixtures("mock_yaml_dashboard") +async def test_play_media(hass: HomeAssistant) -> None: """Test playing media.""" calls = async_mock_service(hass, "cast", "show_lovelace_view") diff --git a/tests/components/lovelace/test_dashboard.py b/tests/components/lovelace/test_dashboard.py index 7577c4dcc0d..3a01e20c1fb 100644 --- a/tests/components/lovelace/test_dashboard.py +++ b/tests/components/lovelace/test_dashboard.py @@ -1,11 +1,11 @@ """Test the Lovelace initialization.""" +from collections.abc import Generator import time from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import frontend from homeassistant.components.lovelace import const, dashboard diff --git a/tests/components/lovelace/test_init.py b/tests/components/lovelace/test_init.py index dc111ab601e..14d93d8302f 100644 --- a/tests/components/lovelace/test_init.py +++ b/tests/components/lovelace/test_init.py @@ -1,10 +1,10 @@ """Test the Lovelace initialization.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/lovelace/test_system_health.py b/tests/components/lovelace/test_system_health.py index d53ebf2871f..4fe248fa950 100644 --- a/tests/components/lovelace/test_system_health.py +++ b/tests/components/lovelace/test_system_health.py @@ -1,10 +1,10 @@ """Tests for Lovelace system health.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.lovelace import dashboard from homeassistant.core import HomeAssistant diff --git a/tests/components/luftdaten/conftest.py b/tests/components/luftdaten/conftest.py index e1aac7caeb0..c3daa390e49 100644 --- a/tests/components/luftdaten/conftest.py +++ b/tests/components/luftdaten/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.luftdaten.const import CONF_SENSOR_ID, DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/lutron/conftest.py b/tests/components/lutron/conftest.py index 90f96f1783d..f2106f736dc 100644 --- a/tests/components/lutron/conftest.py +++ b/tests/components/lutron/conftest.py @@ -1,9 +1,9 @@ """Provide common Lutron fixtures and mocks.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/lutron_caseta/test_device_trigger.py 
b/tests/components/lutron_caseta/test_device_trigger.py index 208dd36cccd..405c504dee1 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -39,11 +39,7 @@ from homeassistant.setup import async_setup_component from . import MockBridge -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations MOCK_BUTTON_DEVICES = [ { @@ -102,12 +98,6 @@ MOCK_BUTTON_DEVICES = [ ] -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def _async_setup_lutron_with_picos(hass): """Setups a lutron bridge with picos.""" config_entry = MockConfigEntry( @@ -135,7 +125,11 @@ async def _async_setup_lutron_with_picos(hass): async def test_get_triggers(hass: HomeAssistant) -> None: """Test we get the expected triggers from a lutron pico.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads device_id = keypads[list(keypads)[0]]["dr_device_id"] @@ -220,7 +214,9 @@ async def test_none_serial_keypad( async def test_if_fires_on_button_event( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing.""" await _async_setup_lutron_with_picos(hass) @@ -266,12 +262,14 @@ async def test_if_fires_on_button_event( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" async def test_if_fires_on_button_event_without_lip( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing on a device that does not support lip.""" await _async_setup_lutron_with_picos(hass) @@ -315,12 +313,12 @@ async def test_if_fires_on_button_event_without_lip( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" async def test_validate_trigger_config_no_device( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for no press with no device.""" @@ -356,16 +354,20 @@ async def test_validate_trigger_config_no_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_validate_trigger_config_unknown_device( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for no press with an unknown device.""" 
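The comment added in test_get_triggers above ("Fetching the config entry runtime_data is a legacy pattern and should not be copied for new integrations") is worth unpacking: these tests look the entry up by id only because the setup helper returns `config_entry_id`. A test written for a new integration would keep the config entry object it created and read `runtime_data` from it directly. A hedged sketch of the two access styles, with illustrative helper names, assuming the integration has already stored its runtime data during setup:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


def data_from_entry(entry: ConfigEntry):
    """Preferred: runtime data lives on the config entry object itself."""
    return entry.runtime_data


def data_from_entry_id(hass: HomeAssistant, entry_id: str):
    """Legacy lookup used in these tests because only the id is at hand."""
    return hass.config_entries.async_get_entry(entry_id).runtime_data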
config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -404,7 +406,7 @@ async def test_validate_trigger_config_unknown_device( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_validate_trigger_invalid_triggers( @@ -412,7 +414,11 @@ async def test_validate_trigger_invalid_triggers( ) -> None: """Test for click_event with invalid triggers.""" config_entry_id = await _async_setup_lutron_with_picos(hass) - data: LutronCasetaData = hass.data[DOMAIN][config_entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry_id + ).runtime_data keypads = data.keypad_data.keypads lutron_device_id = list(keypads)[0] keypad = keypads[lutron_device_id] @@ -444,7 +450,9 @@ async def test_validate_trigger_invalid_triggers( async def test_if_fires_on_button_event_late_setup( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for press trigger firing with integration getting setup late.""" config_entry_id = await _async_setup_lutron_with_picos(hass) @@ -495,5 +503,5 @@ async def test_if_fires_on_button_event_late_setup( hass.bus.async_fire(LUTRON_CASETA_BUTTON_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" diff --git a/tests/components/lutron_caseta/test_logbook.py b/tests/components/lutron_caseta/test_logbook.py index b6e8840c85c..9a58838d65c 100644 --- a/tests/components/lutron_caseta/test_logbook.py +++ b/tests/components/lutron_caseta/test_logbook.py @@ -53,7 +53,11 @@ async def test_humanify_lutron_caseta_button_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() - data: LutronCasetaData = hass.data[DOMAIN][config_entry.entry_id] + # Fetching the config entry runtime_data is a legacy pattern + # and should not be copied for new integrations + data: LutronCasetaData = hass.config_entries.async_get_entry( + config_entry.entry_id + ).runtime_data keypads = data.keypad_data.keypads keypad = keypads["9"] dr_device_id = keypad["dr_device_id"] diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py index e1a8d1131dc..1e0ae04f741 100644 --- a/tests/components/lyric/test_config_flow.py +++ b/tests/components/lyric/test_config_flow.py @@ -26,7 +26,7 @@ CLIENT_SECRET = "5678" @pytest.fixture -async def mock_impl(hass): +async def mock_impl(hass: HomeAssistant) -> None: """Mock implementation.""" await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() @@ -45,12 +45,11 @@ async def test_abort_if_no_configuration(hass: HomeAssistant) -> None: assert result["reason"] == "missing_credentials" -@pytest.mark.usefixtures("current_request_with_host") 
+@pytest.mark.usefixtures("current_request_with_host", "mock_impl") async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_impl, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -112,12 +111,11 @@ async def test_full_flow( assert len(mock_setup.mock_calls) == 1 -@pytest.mark.usefixtures("current_request_with_host") +@pytest.mark.usefixtures("current_request_with_host", "mock_impl") async def test_reauthentication_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - mock_impl, ) -> None: """Test reauthentication flow.""" old_entry = MockConfigEntry( diff --git a/tests/components/madvr/__init__.py b/tests/components/madvr/__init__.py new file mode 100644 index 00000000000..343dd68a25d --- /dev/null +++ b/tests/components/madvr/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the madvr-envy integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/madvr/conftest.py b/tests/components/madvr/conftest.py new file mode 100644 index 00000000000..187786c6964 --- /dev/null +++ b/tests/components/madvr/conftest.py @@ -0,0 +1,87 @@ +"""MadVR conftest for shared testing setup.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +import pytest + +from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN +from homeassistant.const import CONF_HOST, CONF_PORT + +from .const import MOCK_CONFIG, MOCK_MAC + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.madvr.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_madvr_client() -> Generator[AsyncMock]: + """Mock a MadVR client.""" + with ( + patch( + "homeassistant.components.madvr.config_flow.Madvr", autospec=True + ) as mock_client, + patch("homeassistant.components.madvr.Madvr", new=mock_client), + ): + client = mock_client.return_value + client.host = MOCK_CONFIG[CONF_HOST] + client.port = MOCK_CONFIG[CONF_PORT] + client.mac_address = MOCK_MAC + client.connected.return_value = True + client.is_device_connectable.return_value = True + client.loop = AsyncMock() + client.tasks = AsyncMock() + client.set_update_callback = MagicMock() + + # mock the property to be off on startup (which it is) + is_on_mock = PropertyMock(return_value=True) + type(client).is_on = is_on_mock + + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + unique_id=MOCK_MAC, + title=DEFAULT_NAME, + ) + + +def get_update_callback(mock_client: MagicMock): + """Retrieve the update callback function from the mocked client. + + This function extracts the callback that was passed to set_update_callback + on the mocked MadVR client. This callback is typically the handle_push_data + method of the MadVRCoordinator. + + Args: + mock_client (MagicMock): The mocked MadVR client. 
+ + Returns: + function: The update callback function. + + """ + # Get all the calls made to set_update_callback + calls = mock_client.set_update_callback.call_args_list + + if not calls: + raise ValueError("set_update_callback was not called on the mock client") + + # Get the first (and usually only) call + first_call = calls[0] + + # Get the first argument of this call, which should be the callback function + return first_call.args[0] diff --git a/tests/components/madvr/const.py b/tests/components/madvr/const.py new file mode 100644 index 00000000000..8c5e122377b --- /dev/null +++ b/tests/components/madvr/const.py @@ -0,0 +1,18 @@ +"""Constants for the MadVR tests.""" + +from homeassistant.const import CONF_HOST, CONF_PORT + +MOCK_CONFIG = { + CONF_HOST: "192.168.1.1", + CONF_PORT: 44077, +} + +MOCK_MAC = "00:11:22:33:44:55" + +TEST_CON_ERROR = ConnectionError("Connection failed") +TEST_IMP_ERROR = NotImplementedError("Not implemented") + +TEST_FAILED_ON = "Failed to turn on device" +TEST_FAILED_OFF = "Failed to turn off device" +TEST_FAILED_CMD = "Failed to send command" +TEST_COMMAND = "test" diff --git a/tests/components/madvr/snapshots/test_binary_sensors.ambr b/tests/components/madvr/snapshots/test_binary_sensors.ambr new file mode 100644 index 00000000000..7fd54a7c240 --- /dev/null +++ b/tests/components/madvr/snapshots/test_binary_sensors.ambr @@ -0,0 +1,185 @@ +# serializer version: 1 +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'HDR flag', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hdr_flag', + 'unique_id': '00:11:22:33:44:55_hdr_flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_hdr_flag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy HDR flag', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_hdr_flag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_outgoing_hdr_flag', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing HDR flag', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_hdr_flag', + 'unique_id': '00:11:22:33:44:55_outgoing_hdr_flag', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_outgoing_hdr_flag-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR 
Envy Outgoing HDR flag', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_outgoing_hdr_flag', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_power_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Power state', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_state', + 'unique_id': '00:11:22:33:44:55_power_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_power_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Power state', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_power_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.madvr_envy_signal_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal state', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_state', + 'unique_id': '00:11:22:33:44:55_signal_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor_setup[binary_sensor.madvr_envy_signal_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Signal state', + }), + 'context': , + 'entity_id': 'binary_sensor.madvr_envy_signal_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/madvr/snapshots/test_remote.ambr b/tests/components/madvr/snapshots/test_remote.ambr new file mode 100644 index 00000000000..1157496a93e --- /dev/null +++ b/tests/components/madvr/snapshots/test_remote.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_remote_setup[remote.madvr_envy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'remote', + 'entity_category': None, + 'entity_id': 'remote.madvr_envy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:11:22:33:44:55', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_remote_setup[remote.madvr_envy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy', + 'supported_features': , + }), + 'context': , + 'entity_id': 'remote.madvr_envy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/madvr/snapshots/test_sensors.ambr b/tests/components/madvr/snapshots/test_sensors.ambr new file mode 100644 index 00000000000..7b0dd254f77 --- /dev/null +++ b/tests/components/madvr/snapshots/test_sensors.ambr @@ -0,0 +1,1359 @@ +# serializer version: 1 +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_decimal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect decimal', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_dec', + 'unique_id': '00:11:22:33:44:55_aspect_dec', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_decimal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect decimal', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_decimal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.78', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_integer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect integer', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_int', + 'unique_id': '00:11:22:33:44:55_aspect_int', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_integer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect integer', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_integer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_name', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect name', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'aspect_name', + 'unique_id': '00:11:22:33:44:55_aspect_name', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_name-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect name', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_name', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Widescreen', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_aspect_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Aspect resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'aspect_res', + 'unique_id': '00:11:22:33:44:55_aspect_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_aspect_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Aspect resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_aspect_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840:2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_cpu_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'CPU temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_cpu', + 'unique_id': '00:11:22:33:44:55_temp_cpu', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_cpu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy CPU temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_cpu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_gpu_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GPU temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'temp_gpu', + 'unique_id': '00:11:22:33:44:55_temp_gpu', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_gpu_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy GPU temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_gpu_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45.5', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_hdmi_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'HDMI temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_hdmi', + 'unique_id': '00:11:22:33:44:55_temp_hdmi', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_hdmi_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy HDMI temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_hdmi_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.0', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '16:9', + '4:3', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming aspect ratio', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_aspect_ratio', + 'unique_id': '00:11:22:33:44:55_incoming_aspect_ratio', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_aspect_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming aspect ratio', + 'options': list([ + '16:9', + '4:3', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_aspect_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16:9', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.madvr_envy_incoming_bit_depth', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming bit depth', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_bit_depth', + 'unique_id': '00:11:22:33:44:55_incoming_bit_depth', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_bit_depth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming bit depth', + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_bit_depth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10bit', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_black_levels', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming black levels', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_black_levels', + 'unique_id': '00:11:22:33:44:55_incoming_black_levels', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_black_levels-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming black levels', + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_black_levels', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'PC', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_color_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming color space', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_color_space', + 'unique_id': '00:11:22:33:44:55_incoming_color_space', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_color_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming color space', + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_color_space', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'RGB', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming colorimetry', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_colorimetry', + 'unique_id': '00:11:22:33:44:55_incoming_colorimetry', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_colorimetry-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming colorimetry', + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_colorimetry', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_frame_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Incoming frame rate', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_frame_rate', + 'unique_id': '00:11:22:33:44:55_incoming_frame_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_frame_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Incoming frame rate', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_frame_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60p', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Incoming resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_res', + 'unique_id': '00:11:22:33:44:55_incoming_res', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor_setup_and_states[sensor.madvr_envy_incoming_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Incoming resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840x2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '2D', + '3D', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_incoming_signal_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Incoming signal type', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'incoming_signal_type', + 'unique_id': '00:11:22:33:44:55_incoming_signal_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_incoming_signal_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Incoming signal type', + 'options': list([ + '2D', + '3D', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_incoming_signal_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3D', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_mainboard_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Mainboard temperature', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temp_mainboard', + 'unique_id': '00:11:22:33:44:55_temp_mainboard', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_mainboard_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'madVR Envy Mainboard temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_mainboard_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.8', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_decimal', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking decimal', 
+ 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_dec', + 'unique_id': '00:11:22:33:44:55_masking_dec', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_decimal-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking decimal', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_decimal', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.78', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_integer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking integer', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_int', + 'unique_id': '00:11:22:33:44:55_masking_int', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_integer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking integer', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_integer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_masking_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Masking resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'masking_res', + 'unique_id': '00:11:22:33:44:55_masking_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_masking_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Masking resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_masking_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3840:2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing bit depth', + 
'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_bit_depth', + 'unique_id': '00:11:22:33:44:55_outgoing_bit_depth', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_bit_depth-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing bit depth', + 'options': list([ + '8bit', + '10bit', + '12bit', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_bit_depth', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10bit', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing black levels', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_black_levels', + 'unique_id': '00:11:22:33:44:55_outgoing_black_levels', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_black_levels-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing black levels', + 'options': list([ + 'TV', + 'PC', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_black_levels', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'PC', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_color_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing color space', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_color_space', + 'unique_id': '00:11:22:33:44:55_outgoing_color_space', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_color_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing color space', + 'options': list([ + 'RGB', + '444', + '422', + '420', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_color_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'RGB', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'SDR', + 'HDR10', + 
'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing colorimetry', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_colorimetry', + 'unique_id': '00:11:22:33:44:55_outgoing_colorimetry', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_colorimetry-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing colorimetry', + 'options': list([ + 'SDR', + 'HDR10', + 'HLG 601', + 'PAL', + '709', + 'DCI', + '2020', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_colorimetry', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_frame_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing frame rate', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_frame_rate', + 'unique_id': '00:11:22:33:44:55_outgoing_frame_rate', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_frame_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Outgoing frame rate', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_frame_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60p', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_resolution', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Outgoing resolution', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_res', + 'unique_id': '00:11:22:33:44:55_outgoing_res', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_resolution-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'madVR Envy Outgoing resolution', + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_resolution', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'3840x2160', + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + '2D', + '3D', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outgoing signal type', + 'platform': 'madvr', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outgoing_signal_type', + 'unique_id': '00:11:22:33:44:55_outgoing_signal_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup_and_states[sensor.madvr_envy_outgoing_signal_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'madVR Envy Outgoing signal type', + 'options': list([ + '2D', + '3D', + ]), + }), + 'context': , + 'entity_id': 'sensor.madvr_envy_outgoing_signal_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2D', + }) +# --- diff --git a/tests/components/madvr/test_binary_sensors.py b/tests/components/madvr/test_binary_sensors.py new file mode 100644 index 00000000000..469a3225ca0 --- /dev/null +++ b/tests/components/madvr/test_binary_sensors.py @@ -0,0 +1,79 @@ +"""Tests for the MadVR binary sensor entities.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_binary_sensor_setup( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the binary sensor entities.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.BINARY_SENSOR]): + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "positive_payload", "negative_payload"), + [ + ( + "binary_sensor.madvr_envy_power_state", + {"is_on": True}, + {"is_on": False}, + ), + ( + "binary_sensor.madvr_envy_signal_state", + {"is_signal": True}, + {"is_signal": False}, + ), + ( + "binary_sensor.madvr_envy_hdr_flag", + {"hdr_flag": True}, + {"hdr_flag": False}, + ), + ( + "binary_sensor.madvr_envy_outgoing_hdr_flag", + {"outgoing_hdr_flag": True}, + {"outgoing_hdr_flag": False}, + ), + ], +) +async def test_binary_sensors( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_id: str, + positive_payload: dict, + negative_payload: dict, +) -> None: + """Test the binary sensors.""" + await setup_integration(hass, mock_config_entry) + update_callback = get_update_callback(mock_madvr_client) + + # Test positive state + update_callback(positive_payload) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + # Test negative state + update_callback(negative_payload) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_OFF diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py new file mode 100644 index 00000000000..6dc84fd6b00 --- /dev/null +++ b/tests/components/madvr/test_config_flow.py @@ -0,0 +1,128 @@ +"""Tests for the MadVR config flow.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONFIG, MOCK_MAC + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def avoid_wait() -> AsyncGenerator[None]: + """Mock sleep.""" + with patch("homeassistant.components.madvr.config_flow.RETRY_INTERVAL", 0): + yield + + +async def test_full_flow( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_HOST: MOCK_CONFIG[CONF_HOST], + CONF_PORT: MOCK_CONFIG[CONF_PORT], + } + assert result["result"].unique_id == MOCK_MAC + mock_madvr_client.open_connection.assert_called_once() + mock_madvr_client.async_add_tasks.assert_called_once() + 
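test_binary_sensors above drives state changes by replaying device pushes through the callback that conftest.py's get_update_callback recovers from the mocked client. The mechanic is plain unittest.mock call recording; a standalone sketch with illustrative names, independent of Home Assistant:

from unittest.mock import MagicMock


def handle_push_data(payload: dict) -> None:
    """Stand-in for the coordinator callback the integration registers."""
    print(payload)


client = MagicMock()
# During setup the integration under test would register its callback:
client.set_update_callback(handle_push_data)

# get_update_callback() recovers it from the recorded call, so tests can
# replay arbitrary device pushes through it afterwards:
recovered = client.set_update_callback.call_args_list[0].args[0]
assert recovered is handle_push_data
recovered({"hdr_flag": True})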
mock_madvr_client.async_cancel_tasks.assert_called_once() + + +async def test_flow_errors( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test error handling in config flow.""" + mock_madvr_client.open_connection.side_effect = TimeoutError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_madvr_client.open_connection.side_effect = None + mock_madvr_client.connected = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + mock_madvr_client.connected = True + mock_madvr_client.mac_address = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_mac"} + + # ensure an error is recoverable + mock_madvr_client.mac_address = MOCK_MAC + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == DEFAULT_NAME + assert result["data"] == { + CONF_HOST: MOCK_CONFIG[CONF_HOST], + CONF_PORT: MOCK_CONFIG[CONF_PORT], + } + + # Verify method calls + assert mock_madvr_client.open_connection.call_count == 4 + assert mock_madvr_client.async_add_tasks.call_count == 2 + # the first call will not call this due to timeout as expected + assert mock_madvr_client.async_cancel_tasks.call_count == 2 + + +async def test_duplicate( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate config entries.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: MOCK_CONFIG[CONF_HOST], CONF_PORT: MOCK_CONFIG[CONF_PORT]}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/madvr/test_init.py b/tests/components/madvr/test_init.py new file mode 100644 index 00000000000..dace812af11 --- /dev/null +++ b/tests/components/madvr/test_init.py @@ -0,0 +1,28 @@ +"""Tests for the MadVR integration.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_remove(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/madvr/test_remote.py b/tests/components/madvr/test_remote.py new file mode 100644 index 00000000000..6fc507534d6 --- /dev/null +++ b/tests/components/madvr/test_remote.py @@ -0,0 +1,155 @@ +"""Tests for the MadVR remote entity.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.remote import ( + DOMAIN as REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, +) +from homeassistant.const import ( + ATTR_COMMAND, + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . import setup_integration +from .const import ( + TEST_COMMAND, + TEST_CON_ERROR, + TEST_FAILED_CMD, + TEST_FAILED_OFF, + TEST_FAILED_ON, + TEST_IMP_ERROR, +) + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_remote_setup( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test setup of the remote entity.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.REMOTE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_remote_power( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test turning on the remote entity.""" + + await setup_integration(hass, mock_config_entry) + + entity_id = "remote.madvr_envy" + remote = hass.states.get(entity_id) + assert remote.state == STATE_ON + + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + + mock_madvr_client.power_off.assert_called_once() + + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + + mock_madvr_client.power_on.assert_called_once() + + # cover exception cases + caplog.clear() + mock_madvr_client.power_off.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_OFF in caplog.text + + # Test turning off with NotImplementedError + caplog.clear() + mock_madvr_client.power_off.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_OFF in caplog.text + + # Reset side_effect for power_off + mock_madvr_client.power_off.side_effect = None + + # Test turning on with ConnectionError + caplog.clear() + mock_madvr_client.power_on.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + 
assert TEST_FAILED_ON in caplog.text + + # Test turning on with NotImplementedError + caplog.clear() + mock_madvr_client.power_on.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert TEST_FAILED_ON in caplog.text + + +async def test_send_command( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sending command to the remote entity.""" + + await setup_integration(hass, mock_config_entry) + + entity_id = "remote.madvr_envy" + remote = hass.states.get(entity_id) + assert remote.state == STATE_ON + + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + + mock_madvr_client.add_command_to_queue.assert_called_once_with([TEST_COMMAND]) + # cover exceptions + # Test ConnectionError + mock_madvr_client.add_command_to_queue.side_effect = TEST_CON_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + assert TEST_FAILED_CMD in caplog.text + + # Test NotImplementedError + mock_madvr_client.add_command_to_queue.side_effect = TEST_IMP_ERROR + await hass.services.async_call( + REMOTE_DOMAIN, + SERVICE_SEND_COMMAND, + {ATTR_ENTITY_ID: entity_id, ATTR_COMMAND: TEST_COMMAND}, + blocking=True, + ) + assert TEST_FAILED_CMD in caplog.text diff --git a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensors.py new file mode 100644 index 00000000000..25dcc1cdcca --- /dev/null +++ b/tests/components/madvr/test_sensors.py @@ -0,0 +1,95 @@ +"""Tests for the MadVR sensor entities.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.madvr.sensor import get_temperature +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er + +from . 
import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_setup_and_states( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_madvr_client: AsyncMock, +) -> None: + """Test setup of the sensor entities and their states.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + update_callback = get_update_callback(mock_madvr_client) + + # Create a big data update with all sensor values + update_data = { + "temp_gpu": 45.5, + "temp_hdmi": 40.0, + "temp_cpu": 50.2, + "temp_mainboard": 35.8, + "incoming_res": "3840x2160", + "incoming_frame_rate": "60p", + "outgoing_signal_type": "2D", + "incoming_signal_type": "3D", + "incoming_color_space": "RGB", + "incoming_bit_depth": "10bit", + "incoming_colorimetry": "2020", + "incoming_black_levels": "PC", + "incoming_aspect_ratio": "16:9", + "outgoing_res": "3840x2160", + "outgoing_frame_rate": "60p", + "outgoing_color_space": "RGB", + "outgoing_bit_depth": "10bit", + "outgoing_colorimetry": "2020", + "outgoing_black_levels": "PC", + "aspect_res": "3840:2160", + "aspect_dec": "1.78", + "aspect_int": "178", + "aspect_name": "Widescreen", + "masking_res": "3840:2160", + "masking_dec": "1.78", + "masking_int": "178", + } + + # Update all sensors at once + update_callback(update_data) + await hass.async_block_till_done() + + # Snapshot all entity states + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Test invalid temperature value + update_callback({"temp_gpu": -1}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == STATE_UNKNOWN + + # Test sensor unknown + update_callback({"incoming_res": None}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_incoming_resolution").state == STATE_UNKNOWN + ) + + # Test sensor becomes known again + update_callback({"incoming_res": "1920x1080"}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_incoming_resolution").state == "1920x1080" + + # Test temperature sensor + update_callback({"temp_gpu": 41.2}) + await hass.async_block_till_done() + assert hass.states.get("sensor.madvr_envy_gpu_temperature").state == "41.2" + + # test get_temperature ValueError + assert get_temperature(None, "temp_key") is None diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py index 31e831c3bae..6fcf9176aae 100644 --- a/tests/components/mailbox/test_init.py +++ b/tests/components/mailbox/test_init.py @@ -8,11 +8,11 @@ from typing import Any from aiohttp.test_utils import TestClient import pytest -from homeassistant.bootstrap import async_setup_component from homeassistant.components import mailbox from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from tests.common import MockModule, mock_integration, mock_platform diff --git a/tests/components/map/test_init.py b/tests/components/map/test_init.py index afafdd1eb16..217550852bd 100644 --- a/tests/components/map/test_init.py +++ 
b/tests/components/map/test_init.py @@ -1,10 +1,10 @@ """Test the Map initialization.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.map import DOMAIN from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant diff --git a/tests/components/mastodon/__init__.py b/tests/components/mastodon/__init__.py new file mode 100644 index 00000000000..a4c730db07a --- /dev/null +++ b/tests/components/mastodon/__init__.py @@ -0,0 +1,13 @@ +"""Tests for the Mastodon integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py new file mode 100644 index 00000000000..03c3e754c11 --- /dev/null +++ b/tests/components/mastodon/conftest.py @@ -0,0 +1,57 @@ +"""Mastodon tests configuration.""" + +from collections.abc import Generator +from unittest.mock import patch + +import pytest + +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET + +from tests.common import MockConfigEntry, load_json_object_fixture +from tests.components.smhi.common import AsyncMock + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.mastodon.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_mastodon_client() -> Generator[AsyncMock]: + """Mock a Mastodon client.""" + with ( + patch( + "homeassistant.components.mastodon.utils.Mastodon", + autospec=True, + ) as mock_client, + ): + client = mock_client.return_value + client.instance.return_value = load_json_object_fixture("instance.json", DOMAIN) + client.account_verify_credentials.return_value = load_json_object_fixture( + "account_verify_credentials.json", DOMAIN + ) + client.status_post.return_value = None + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="@trwnh@mastodon.social", + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + entry_id="01J35M4AH9HYRC2V0G6RNVNWJH", + unique_id="client_id", + ) diff --git a/tests/components/mastodon/fixtures/account_verify_credentials.json b/tests/components/mastodon/fixtures/account_verify_credentials.json new file mode 100644 index 00000000000..401caa121ae --- /dev/null +++ b/tests/components/mastodon/fixtures/account_verify_credentials.json @@ -0,0 +1,78 @@ +{ + "id": "14715", + "username": "trwnh", + "acct": "trwnh", + "display_name": "infinite love ⴳ", + "locked": false, + "bot": false, + "created_at": "2016-11-24T10:02:12.085Z", + "note": "

i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:
", + "url": "https://mastodon.social/@trwnh", + "avatar": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png", + "header": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "header_static": "https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg", + "followers_count": 821, + "following_count": 178, + "statuses_count": 33120, + "last_status_at": "2019-11-24T15:49:42.251Z", + "source": { + "privacy": "public", + "sensitive": false, + "language": "", + "note": "i have approximate knowledge of many things. perpetual student. (nb/ace/they)\r\n\r\nxmpp/email: a@trwnh.com\r\nhttps://trwnh.com\r\nhelp me live: https://liberapay.com/at or https://paypal.me/trwnh\r\n\r\n- my triggers are moths and glitter\r\n- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise\r\n- dm me if i did something wrong, so i can improve\r\n- purest person on fedi, do not lewd in my presence\r\n- #1 ami cole fan account\r\n\r\n:fatyoshi:", + "fields": [ + { + "name": "Website", + "value": "https://trwnh.com", + "verified_at": "2019-08-29T04:14:55.571+00:00" + }, + { + "name": "Sponsor", + "value": "https://liberapay.com/at", + "verified_at": "2019-11-15T10:06:15.557+00:00" + }, + { + "name": "Fan of:", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "verified_at": null + }, + { + "name": "Main topics:", + "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + "verified_at": null + } + ], + "follow_requests_count": 0 + }, + "emojis": [ + { + "shortcode": "fatyoshi", + "url": "https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png", + "static_url": "https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png", + "visible_in_picker": true + } + ], + "fields": [ + { + "name": "Website", + "value": "https://trwnh.com", + "verified_at": "2019-08-29T04:14:55.571+00:00" + }, + { + "name": "Sponsor", + "value": "https://liberapay.com/at", + "verified_at": "2019-11-15T10:06:15.557+00:00" + }, + { + "name": "Fan of:", + "value": "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + "verified_at": null + }, + { + "name": "Main topics:", + "value": "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. 
i'm just here to hang out and talk to cool people!", + "verified_at": null + } + ] +} diff --git a/tests/components/mastodon/fixtures/instance.json b/tests/components/mastodon/fixtures/instance.json new file mode 100644 index 00000000000..b0e904e80ef --- /dev/null +++ b/tests/components/mastodon/fixtures/instance.json @@ -0,0 +1,147 @@ +{ + "domain": "mastodon.social", + "title": "Mastodon", + "version": "4.0.0rc1", + "source_url": "https://github.com/mastodon/mastodon", + "description": "The original server operated by the Mastodon gGmbH non-profit", + "usage": { + "users": { + "active_month": 123122 + } + }, + "thumbnail": { + "url": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", + "blurhash": "UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$", + "versions": { + "@1x": "https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png", + "@2x": "https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png" + } + }, + "languages": ["en"], + "configuration": { + "urls": { + "streaming": "wss://mastodon.social" + }, + "vapid": { + "public_key": "BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=" + }, + "accounts": { + "max_featured_tags": 10, + "max_pinned_statuses": 4 + }, + "statuses": { + "max_characters": 500, + "max_media_attachments": 4, + "characters_reserved_per_url": 23 + }, + "media_attachments": { + "supported_mime_types": [ + "image/jpeg", + "image/png", + "image/gif", + "image/heic", + "image/heif", + "image/webp", + "video/webm", + "video/mp4", + "video/quicktime", + "video/ogg", + "audio/wave", + "audio/wav", + "audio/x-wav", + "audio/x-pn-wave", + "audio/vnd.wave", + "audio/ogg", + "audio/vorbis", + "audio/mpeg", + "audio/mp3", + "audio/webm", + "audio/flac", + "audio/aac", + "audio/m4a", + "audio/x-m4a", + "audio/mp4", + "audio/3gpp", + "video/x-ms-asf" + ], + "image_size_limit": 10485760, + "image_matrix_limit": 16777216, + "video_size_limit": 41943040, + "video_frame_rate_limit": 60, + "video_matrix_limit": 2304000 + }, + "polls": { + "max_options": 4, + "max_characters_per_option": 50, + "min_expiration": 300, + "max_expiration": 2629746 + }, + "translation": { + "enabled": true + } + }, + "registrations": { + "enabled": false, + "approval_required": false, + "message": null + }, + "contact": { + "email": "staff@mastodon.social", + "account": { + "id": "1", + "username": "Gargron", + "acct": "Gargron", + "display_name": "Eugen 💀", + "locked": false, + "bot": false, + "discoverable": true, + "group": false, + "created_at": "2016-03-16T00:00:00.000Z", + "note": "

Founder, CEO and lead developer @Mastodon, Germany.
", + "url": "https://mastodon.social/@Gargron", + "avatar": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", + "avatar_static": "https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg", + "header": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", + "header_static": "https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg", + "followers_count": 133026, + "following_count": 311, + "statuses_count": 72605, + "last_status_at": "2022-10-31", + "noindex": false, + "emojis": [], + "fields": [ + { + "name": "Patreon", + "value": "https://www.patreon.com/mastodon", + "verified_at": null + } + ] + } + }, + "rules": [ + { + "id": "1", + "text": "Sexually explicit or violent media must be marked as sensitive when posting" + }, + { + "id": "2", + "text": "No racism, sexism, homophobia, transphobia, xenophobia, or casteism" + }, + { + "id": "3", + "text": "No incitement of violence or promotion of violent ideologies" + }, + { + "id": "4", + "text": "No harassment, dogpiling or doxxing of other users" + }, + { + "id": "5", + "text": "No content illegal in Germany" + }, + { + "id": "7", + "text": "Do not share intentionally false or misleading information" + } + ] +} diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr new file mode 100644 index 00000000000..f0b650076be --- /dev/null +++ b/tests/components/mastodon/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': , + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'mastodon', + 'client_id', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Mastodon gGmbH', + 'model': '@trwnh@mastodon.social', + 'model_id': None, + 'name': 'Mastodon', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': '4.0.0rc1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py new file mode 100644 index 00000000000..01cdc061d3e --- /dev/null +++ b/tests/components/mastodon/test_config_flow.py @@ -0,0 +1,179 @@ +"""Tests for the Mastodon config flow.""" + +from unittest.mock import AsyncMock + +from mastodon.Mastodon import MastodonNetworkError, MastodonUnauthorizedError +import pytest + +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + 
CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "@trwnh@mastodon.social" + assert result["data"] == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert result["result"].unique_id == "client_id" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MastodonNetworkError, "network_error"), + (MastodonUnauthorizedError, "unauthorized_error"), + (Exception, "unknown"), + ], +) +async def test_flow_errors( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test flow errors.""" + mock_mastodon_client.account_verify_credentials.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error} + + mock_mastodon_client.account_verify_credentials.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test duplicate flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_import_flow( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test importing yaml config.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "import_client_id", + CONF_CLIENT_SECRET: "import_client_secret", + CONF_ACCESS_TOKEN: "import_access_token", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MastodonNetworkError, "network_error"), + (MastodonUnauthorizedError, "unauthorized_error"), + (Exception, "unknown"), + ], +) +async def test_import_flow_abort( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_setup_entry: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test importing yaml config abort.""" + mock_mastodon_client.account_verify_credentials.side_effect = exception 
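+ # when credential verification fails, the YAML import flow should abort instead of creating an entry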
+ + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "import_client_id", + CONF_CLIENT_SECRET: "import_client_secret", + CONF_ACCESS_TOKEN: "import_access_token", + }, + ) + assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/mastodon/test_init.py b/tests/components/mastodon/test_init.py new file mode 100644 index 00000000000..53796e39782 --- /dev/null +++ b/tests/components/mastodon/test_init.py @@ -0,0 +1,25 @@ +"""Tests for the Mastodon integration.""" + +from unittest.mock import AsyncMock + +from mastodon.Mastodon import MastodonError + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_initialization_failure( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test initialization failure.""" + mock_mastodon_client.instance.side_effect = MastodonError + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/mastodon/test_notify.py b/tests/components/mastodon/test_notify.py new file mode 100644 index 00000000000..ab2d7456baf --- /dev/null +++ b/tests/components/mastodon/test_notify.py @@ -0,0 +1,38 @@ +"""Tests for the Mastodon notify platform.""" + +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +async def test_notify( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test sending a message.""" + await setup_integration(hass, mock_config_entry) + + assert hass.services.has_service(NOTIFY_DOMAIN, "trwnh_mastodon_social") + + await hass.services.async_call( + NOTIFY_DOMAIN, + "trwnh_mastodon_social", + { + "message": "test toot", + }, + blocking=True, + return_response=False, + ) + + mock_mastodon_client.status_post.assert_called_once() diff --git a/tests/components/matrix/conftest.py b/tests/components/matrix/conftest.py index bb5448a8a09..0b84aff5434 100644 --- a/tests/components/matrix/conftest.py +++ b/tests/components/matrix/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from pathlib import Path import re import tempfile @@ -24,7 +25,6 @@ from nio import ( ) from PIL import Image import pytest -from typing_extensions import Generator from homeassistant.components.matrix import ( CONF_COMMANDS, @@ -48,7 +48,7 @@ from homeassistant.const import ( CONF_USERNAME, CONF_VERIFY_SSL, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import async_capture_events @@ -294,13 +294,13 @@ async def matrix_bot( @pytest.fixture -def matrix_events(hass: HomeAssistant): +def matrix_events(hass: HomeAssistant) -> list[Event]: """Track event calls.""" return async_capture_events(hass, MATRIX_DOMAIN) @pytest.fixture -def command_events(hass: HomeAssistant): +def command_events(hass: HomeAssistant) -> list[Event]: """Track event calls.""" return async_capture_events(hass, EVENT_MATRIX_COMMAND) diff --git a/tests/components/matrix/test_commands.py b/tests/components/matrix/test_commands.py index 8539252ad66..dabee74fdc3 100644 --- a/tests/components/matrix/test_commands.py +++ b/tests/components/matrix/test_commands.py @@ -1,11 +1,11 @@ """Test MatrixBot's ability to parse and respond to commands in matrix rooms.""" +from dataclasses import dataclass from functools import partial from itertools import chain from typing import Any from nio import MatrixRoom, RoomMessageText -from pydantic.dataclasses import dataclass import pytest from homeassistant.components.matrix import MatrixBot, RoomID diff --git a/tests/components/matrix/test_send_message.py b/tests/components/matrix/test_send_message.py index cdea2270cf9..3db2877e789 100644 --- a/tests/components/matrix/test_send_message.py +++ b/tests/components/matrix/test_send_message.py @@ -10,7 +10,7 @@ from homeassistant.components.matrix import ( ) from homeassistant.components.matrix.const import FORMAT_HTML, SERVICE_SEND_MESSAGE from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from .conftest import TEST_BAD_ROOM, TEST_JOINABLE_ROOMS @@ -19,7 +19,7 @@ async def test_send_message( hass: HomeAssistant, matrix_bot: MatrixBot, image_path, - matrix_events, + matrix_events: list[Event], caplog: pytest.LogCaptureFixture, ) -> None: """Test the send_message service.""" @@ -63,7 +63,7 @@ async def test_send_message( async def test_unsendable_message( hass: HomeAssistant, matrix_bot: MatrixBot, - matrix_events, + matrix_events: list[Event], caplog: 
pytest.LogCaptureFixture, ) -> None: """Test the send_message service with an invalid room.""" diff --git a/tests/components/matter/common.py b/tests/components/matter/common.py index 7878ac564fd..541f7383f1d 100644 --- a/tests/components/matter/common.py +++ b/tests/components/matter/common.py @@ -31,9 +31,12 @@ async def setup_integration_with_node_fixture( hass: HomeAssistant, node_fixture: str, client: MagicMock, + override_attributes: dict[str, Any] | None = None, ) -> MatterNode: """Set up Matter integration with fixture as node.""" node_data = load_and_parse_node_fixture(node_fixture) + if override_attributes: + node_data["attributes"].update(override_attributes) node = MatterNode( dataclass_from_dict( MatterNodeData, diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index 05fd776e57a..f3d8740a73b 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -3,13 +3,13 @@ from __future__ import annotations import asyncio +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode from matter_server.common.const import SCHEMA_VERSION from matter_server.common.models import ServerInfoMessage import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.core import HomeAssistant @@ -51,6 +51,7 @@ async def matter_client_fixture() -> AsyncGenerator[MagicMock]: wifi_credentials_set=True, thread_credentials_set=True, min_supported_schema_version=SCHEMA_VERSION, + bluetooth_enabled=False, ) yield client diff --git a/tests/components/matter/fixtures/config_entry_diagnostics.json b/tests/components/matter/fixtures/config_entry_diagnostics.json index f591709fbda..000b0d4e2e6 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics.json @@ -6,7 +6,8 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1 + "min_supported_schema_version": 1, + "bluetooth_enabled": false }, "nodes": [ { diff --git a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json index 503fd3b9a7a..95447783bbc 100644 --- a/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json +++ b/tests/components/matter/fixtures/config_entry_diagnostics_redacted.json @@ -7,7 +7,8 @@ "sdk_version": "2022.12.0", "wifi_credentials_set": true, "thread_credentials_set": false, - "min_supported_schema_version": 1 + "min_supported_schema_version": 1, + "bluetooth_enabled": false }, "nodes": [ { diff --git a/tests/components/matter/fixtures/nodes/dimmable-light.json b/tests/components/matter/fixtures/nodes/dimmable-light.json index 74f132a88a9..58c22f1b807 100644 --- a/tests/components/matter/fixtures/nodes/dimmable-light.json +++ b/tests/components/matter/fixtures/nodes/dimmable-light.json @@ -78,7 +78,7 @@ ], "0/42/0": [], "0/42/1": true, - "0/42/2": 0, + "0/42/2": 1, "0/42/3": 0, "0/42/65532": 0, "0/42/65533": 1, @@ -365,7 +365,148 @@ "1/29/65533": 1, "1/29/65528": [], "1/29/65529": [], - "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533] + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "6/80/0": "LED Color", + "6/80/1": 0, + "6/80/2": [ + { + "0": "Red", + "1": 0, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Orange", + "1": 1, + "2": [ + { 
+ "0": 0, + "1": 0 + } + ] + }, + { + "0": "Lemon", + "1": 2, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Lime", + "1": 3, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Green", + "1": 4, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Teal", + "1": 5, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Cyan", + "1": 6, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Aqua", + "1": 7, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Blue", + "1": 8, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Violet", + "1": 9, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Magenta", + "1": 10, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "Pink", + "1": 11, + "2": [ + { + "0": 0, + "1": 0 + } + ] + }, + { + "0": "White", + "1": 12, + "2": [ + { + "0": 0, + "1": 0 + } + ] + } + ], + "6/80/3": 7, + "6/80/65532": 0, + "6/80/65533": 1, + "6/80/65528": [], + "6/80/65529": [0], + "6/80/65530": [], + "6/80/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533] }, "available": true, "attribute_subscriptions": [] diff --git a/tests/components/matter/fixtures/nodes/fan.json b/tests/components/matter/fixtures/nodes/fan.json new file mode 100644 index 00000000000..e33c29ce66d --- /dev/null +++ b/tests/components/matter/fixtures/nodes/fan.json @@ -0,0 +1,340 @@ +{ + "node_id": 29, + "date_commissioned": "2024-07-25T08:34:23.014310", + "last_interview": "2024-07-25T08:34:23.014315", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 18, + "1": 1 + }, + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63, 64], + "0/29/2": [41], + "0/29/3": [1, 2, 3, 4, 5, 6], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65530": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 5 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65530": [0, 1], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Mock", + "0/40/2": 4961, + "0/40/3": "Fan", + "0/40/4": 2, + "0/40/5": "Mocked Fan Switch", + "0/40/6": "**REDACTED**", + "0/40/7": 1, + "0/40/8": "1.0", + "0/40/9": 4, + "0/40/10": "0.0.1", + "0/40/11": "", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "", + "0/40/16": false, + "0/40/17": true, + "0/40/18": "", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 2, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65530": [0], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/42/0": [], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65530": [0, 1, 2], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65530": [], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "J/YquJb4Ao4=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 
0, + "0/49/6": "J/YquJb4Ao4=", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65530": [], + "0/49/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/51/0": [], + "0/51/1": 15, + "0/51/2": 5688, + "0/51/3": 1, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + "0/51/65530": [3], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "ha-thread", + "0/53/3": 12768, + "0/53/4": 5924944741529093989, + "0/53/5": "", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 933034070, + "0/53/10": 68, + "0/53/11": 16, + "0/53/12": 151, + "0/53/13": 31, + "0/53/14": 1, + "0/53/15": 0, + "0/53/16": 1, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 1, + "0/53/20": 0, + "0/53/21": 0, + "0/53/22": 3533, + "0/53/23": 3105, + "0/53/24": 428, + "0/53/25": 1889, + "0/53/26": 1879, + "0/53/27": 1644, + "0/53/28": 2317, + "0/53/29": 0, + "0/53/30": 1216, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 534, + "0/53/34": 10, + "0/53/35": 0, + "0/53/36": 42, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 18130, + "0/53/40": 12178, + "0/53/41": 5863, + "0/53/42": 5103, + "0/53/43": 0, + "0/53/44": 11639, + "0/53/45": 1216, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 0, + "0/53/49": 14, + "0/53/50": 0, + "0/53/51": 89, + "0/53/52": 0, + "0/53/53": 69, + "0/53/54": 0, + "0/53/55": 0, + "0/53/56": 131072, + "0/53/57": 0, + "0/53/58": 0, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//4A==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [0, 0, 0, 0], + "0/53/65532": 15, + "0/53/65533": 1, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65530": [], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 65528, 65529, 65530, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65530": [], + "0/60/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 4, + "0/62/4": [], + "0/62/5": 5, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65530": [], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65530": [], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "0/64/0": [ + { + "0": "Vendor", + "1": "Mocked" + }, + { + "0": "Product", + "1": "Fan" + } + ], + "0/64/65532": 0, + "0/64/65533": 1, + "0/64/65528": [], + "0/64/65529": [], + "0/64/65530": [], + "0/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65530": [], + "1/3/65531": [0, 1, 65528, 65529, 65530, 65531, 65532, 65533], + 
"1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 4, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65530": [], + "1/4/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 43, + "1": 1 + } + ], + "1/29/1": [3, 4, 6, 8, 29, 64, 80, 514, 305134641], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65530": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533], + "1/64/0": [ + { + "0": "DeviceType", + "1": "Fan" + } + ], + "1/64/65532": 0, + "1/64/65533": 1, + "1/64/65528": [], + "1/64/65529": [], + "1/64/65530": [], + "1/64/65531": [0, 65528, 65529, 65530, 65531, 65532, 65533], + + "1/514/0": 8, + "1/514/1": 2, + "1/514/2": 0, + "1/514/3": 0, + "1/514/4": 3, + "1/514/5": 0, + "1/514/6": 0, + "1/514/9": 3, + "1/514/10": 0, + "1/514/65532": 25, + "1/514/65533": 4, + "1/514/65528": [], + "1/514/65529": [0], + "1/514/65530": [], + "1/514/65531": [ + 0, 1, 2, 3, 4, 5, 6, 9, 10, 65528, 65529, 65530, 65531, 65532, 65533 + ] + }, + "attribute_subscriptions": [] +} diff --git a/tests/components/matter/fixtures/nodes/generic-switch-multi.json b/tests/components/matter/fixtures/nodes/generic-switch-multi.json index f564e91a1ce..8923198c31e 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch-multi.json +++ b/tests/components/matter/fixtures/nodes/generic-switch-multi.json @@ -72,8 +72,9 @@ "1/59/0": 2, "1/59/65533": 1, "1/59/1": 0, + "1/59/2": 2, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 14, + "1/59/65532": 30, "1/59/65528": [], "1/64/0": [ { @@ -101,8 +102,9 @@ "2/59/0": 2, "2/59/65533": 1, "2/59/1": 0, + "2/59/2": 2, "2/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "2/59/65532": 14, + "2/59/65532": 30, "2/59/65528": [], "2/64/0": [ { diff --git a/tests/components/matter/fixtures/nodes/generic-switch.json b/tests/components/matter/fixtures/nodes/generic-switch.json index 80773915748..9b334c5fb54 100644 --- a/tests/components/matter/fixtures/nodes/generic-switch.json +++ b/tests/components/matter/fixtures/nodes/generic-switch.json @@ -73,7 +73,7 @@ "1/59/65533": 1, "1/59/1": 0, "1/59/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], - "1/59/65532": 30, + "1/59/65532": 14, "1/59/65528": [] }, "available": true, diff --git a/tests/components/matter/fixtures/nodes/microwave-oven.json b/tests/components/matter/fixtures/nodes/microwave-oven.json new file mode 100644 index 00000000000..ed0a4accd6a --- /dev/null +++ b/tests/components/matter/fixtures/nodes/microwave-oven.json @@ -0,0 +1,405 @@ +{ + "node_id": 157, + "date_commissioned": "2024-07-04T12:31:22.759270", + "last_interview": "2024-07-04T12:31:22.759275", + "interview_version": 6, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 44, 48, 49, 51, 54, 60, 62, 63], + "0/29/2": [], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 2, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 1 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 4, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 17, + "0/40/1": "Mock", + "0/40/2": 65521, + "0/40/3": "Microwave Oven", + "0/40/4": 32769, + "0/40/5": "", + "0/40/6": "**REDACTED**", + 
"0/40/7": 0, + "0/40/8": "TEST_VERSION", + "0/40/9": 1, + "0/40/10": "1.0", + "0/40/11": "20200101", + "0/40/12": "", + "0/40/13": "", + "0/40/14": "", + "0/40/15": "TEST_SN", + "0/40/16": false, + "0/40/18": "D5908CF5E1382F42", + "0/40/19": { + "0": 3, + "1": 65535 + }, + "0/40/20": null, + "0/40/21": 16973824, + "0/40/22": 1, + "0/40/65532": 0, + "0/40/65533": 3, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20, 21, + 22, 65528, 65529, 65531, 65532, 65533 + ], + "0/44/0": 0, + "0/44/65532": 0, + "0/44/65533": 1, + "0/44/65528": [], + "0/44/65529": [], + "0/44/65531": [0, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 2, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "ZW5kMA==", + "1": true + } + ], + "0/49/2": 0, + "0/49/3": 0, + "0/49/4": true, + "0/49/5": null, + "0/49/6": null, + "0/49/7": null, + "0/49/65532": 4, + "0/49/65533": 2, + "0/49/65528": [], + "0/49/65529": [], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [ + { + "0": "vethd3cc78a", + "1": true, + "2": null, + "3": null, + "4": "RiMoOM7I", + "5": [], + "6": ["/oAAAAAAAABEIyj//jjOyA=="], + "7": 0 + }, + { + "0": "veth86f4b74", + "1": true, + "2": null, + "3": null, + "4": "ehLA7XI6", + "5": [], + "6": ["/oAAAAAAAAB4EsD//u1yOg=="], + "7": 0 + }, + { + "0": "veth36c1460", + "1": true, + "2": null, + "3": null, + "4": "0sdiwOO7", + "5": [], + "6": ["/oAAAAAAAADQx2L//sDjuw=="], + "7": 0 + }, + { + "0": "veth55a0982", + "1": true, + "2": null, + "3": null, + "4": "fuu5VpgB", + "5": [], + "6": ["/oAAAAAAAAB867n//laYAQ=="], + "7": 0 + }, + { + "0": "vethd446fa5", + "1": true, + "2": null, + "3": null, + "4": "QsY5wCp1", + "5": [], + "6": ["/oAAAAAAAABAxjn//sAqdQ=="], + "7": 0 + }, + { + "0": "vethfc6e4d6", + "1": true, + "2": null, + "3": null, + "4": "IsHWia4E", + "5": [], + "6": ["/oAAAAAAAAAgwdb//omuBA=="], + "7": 0 + }, + { + "0": "veth4b35142", + "1": true, + "2": null, + "3": null, + "4": "RizM/XJz", + "5": [], + "6": ["/oAAAAAAAABELMz//v1ycw=="], + "7": 0 + }, + { + "0": "vetha0a808d", + "1": true, + "2": null, + "3": null, + "4": "JrxkpiTq", + "5": [], + "6": ["/oAAAAAAAAAkvGT//qYk6g=="], + "7": 0 + }, + { + "0": "hassio", + "1": true, + "2": null, + "3": null, + "4": "AkL+6fKF", + "5": ["rB4gAQ=="], + "6": ["/oAAAAAAAAAAQv7//unyhQ=="], + "7": 0 + }, + { + "0": "docker0", + "1": true, + "2": null, + "3": null, + "4": "AkKzcIpP", + "5": ["rB7oAQ=="], + "6": ["/oAAAAAAAAAAQrP//nCKTw=="], + "7": 0 + }, + { + "0": "end0", + "1": true, + "2": null, + "3": null, + "4": "5F8BoroJ", + "5": ["wKgBAg=="], + "6": [ + "KgKkZACnAAHGF8Tinim+lQ==", + "/XH1Cm7wY08fhLPRgO32Uw==", + "/oAAAAAAAAAENYnD2gV25w==" + ], + "7": 2 + }, + { + "0": "lo", + "1": true, + "2": null, + "3": null, + "4": "AAAAAAAA", + "5": ["fwAAAQ=="], + "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], + "7": 0 + } + ], + "0/51/1": 1, + "0/51/2": 16, + "0/51/3": 0, + "0/51/4": 0, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 2, + "0/51/65528": [2], + "0/51/65529": [0, 1], + "0/51/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533 + ], + "0/54/0": null, + "0/54/1": null, + "0/54/2": null, + "0/54/3": null, + "0/54/4": null, + "0/54/5": null, + 
"0/54/6": null, + "0/54/7": null, + "0/54/8": null, + "0/54/9": null, + "0/54/10": null, + "0/54/11": null, + "0/54/12": null, + "0/54/65532": 3, + "0/54/65533": 1, + "0/54/65528": [], + "0/54/65529": [0], + "0/54/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, + 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [ + { + "1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVASQRnRgkBwEkCAEwCUEEleMInA+X+lZO6bSa7ysHaAvYS13Fg9GoRuhiFk+wvtjLUrouyH+DUp3p3purrVdfUWTp03damVsxp9Lv48goDzcKNQEoARgkAgE2AwQCBAEYMAQUrD2d44zyVXjKbyYgNaEibaXFI7IwBRTphWiJ/NqGe3Cx3Nj8H02NgGioSRgwC0CaASOOwmsHE8cNw7FhQDtRhh0ztvwdfZKANU93vrX/+ww8UifrTjUIgvobgixpCGxmGvEmk3RN7TX6lgX4Qz7MGA==", + "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEYztrLK2UY1ORHUEFLO7PDfVjw/MnMDNX5kjdHHDU7npeITnSyg/kxxUM+pD7ccxfDuHQKHbBq9+qbJi8oGik8DcKNQEpARgkAmAwBBTphWiJ/NqGe3Cx3Nj8H02NgGioSTAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQOOcZAL8XEktvE5sjrUmFNhkP2g3Ef+4BHtogItdZYyA9E/WbzW25E0UxZInwjjIzH3YimDUZVoEWGML8NV2kCEY", + "254": 1 + } + ], + "0/62/1": [ + { + "1": "BAg5aeR7RuFKZhukCxMGglCd00dKlhxGq8BbjeyZClKz5kN2Ytzav0xWsiWEEb3s9uvMIYFoQYULnSJvOMTcD14=", + "2": 65521, + "3": 1, + "4": 157, + "5": "", + "254": 1 + } + ], + "0/62/2": 16, + "0/62/3": 1, + "0/62/4": [ + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" + ], + "0/62/5": 1, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 4, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 2, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 0, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0, 64], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/29/0": [ + { + "0": 121, + "1": 1 + } + ], + "1/29/1": [3, 29, 94, 95, 96], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 2, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/94/0": [ + { + "0": "Normal", + "1": 0, + "2": [ + { + "1": 16384 + } + ] + }, + { + "0": "Defrost", + "1": 1, + "2": [ + { + "1": 16385 + } + ] + } + ], + "1/94/1": 0, + "1/94/65532": 0, + "1/94/65533": 1, + "1/94/65528": [], + "1/94/65529": [], + "1/94/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/95/0": 30, + "1/95/1": 86400, + "1/95/2": 90, + "1/95/3": 20, + "1/95/4": 90, + "1/95/5": 10, + "1/95/8": 1000, + "1/95/65532": 5, + "1/95/65533": 1, + "1/95/65528": [], + "1/95/65529": [0, 1], + "1/95/65531": [0, 1, 2, 3, 4, 5, 8, 65528, 65529, 65531, 65532, 65533], + "1/96/0": null, + "1/96/1": null, + "1/96/2": 30, + "1/96/3": [ + { + "0": 0 + }, + { + "0": 1 + }, + { + "0": 2 + }, + { + "0": 3 + } + ], + "1/96/4": 0, + "1/96/5": { + "0": 0 + }, + "1/96/65532": 0, + "1/96/65533": 2, + "1/96/65528": [4], + "1/96/65529": [0, 1, 2, 3], + "1/96/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533] + }, + "attribute_subscriptions": [] +} diff 
--git a/tests/components/matter/test_binary_sensor.py b/tests/components/matter/test_binary_sensor.py index becedc0af62..f419a12c59f 100644 --- a/tests/components/matter/test_binary_sensor.py +++ b/tests/components/matter/test_binary_sensor.py @@ -1,10 +1,10 @@ """Test Matter binary sensors.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from matter_server.client.models.node import MatterNode import pytest -from typing_extensions import Generator from homeassistant.components.matter.binary_sensor import ( DISCOVERY_SCHEMAS as BINARY_SENSOR_SCHEMAS, diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index e0015e8b445..4d6978edfde 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -350,3 +350,9 @@ async def test_room_airconditioner( state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.state == HVACMode.DRY + + # test featuremap update + set_node_attribute(room_airconditioner, 1, 513, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("climate.room_airconditioner_thermostat") + assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index 562cf4bb86a..642bfe0f804 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator from ipaddress import ip_address from typing import Any from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.hassio import HassioAPIError, HassioServiceInfo diff --git a/tests/components/matter/test_event.py b/tests/components/matter/test_event.py index a7bd7c91f7b..183867642f5 100644 --- a/tests/components/matter/test_event.py +++ b/tests/components/matter/test_event.py @@ -50,8 +50,6 @@ async def test_generic_switch_node( "short_release", "long_press", "long_release", - "multi_press_ongoing", - "multi_press_complete", ] # trigger firing a new event from the device await trigger_subscription_callback( @@ -72,26 +70,6 @@ async def test_generic_switch_node( ) state = hass.states.get("event.mock_generic_switch_button") assert state.attributes[ATTR_EVENT_TYPE] == "initial_press" - # trigger firing a multi press event - await trigger_subscription_callback( - hass, - matter_client, - EventType.NODE_EVENT, - MatterNodeEvent( - node_id=generic_switch_node.node_id, - endpoint_id=1, - cluster_id=59, - event_id=5, - event_number=0, - priority=1, - timestamp=0, - timestamp_type=0, - data={"NewPosition": 3}, - ), - ) - state = hass.states.get("event.mock_generic_switch_button") - assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_ongoing" - assert state.attributes["NewPosition"] == 3 # This tests needs to be adjusted to remove lingering tasks @@ -109,8 +87,8 @@ async def test_generic_switch_multi_node( assert state_button_1.name == "Mock Generic Switch Button (1)" # check event_types from featuremap 14 assert state_button_1.attributes[ATTR_EVENT_TYPES] == [ - "initial_press", - "short_release", + "multi_press_1", + "multi_press_2", "long_press", "long_release", ] @@ -120,3 +98,23 @@ async def 
test_generic_switch_multi_node( assert state_button_1.state == "unknown" # name should be 'DeviceName Fancy Button' due to the label set to 'Fancy Button' assert state_button_1.name == "Mock Generic Switch Fancy Button" + + # trigger firing a multi press event + await trigger_subscription_callback( + hass, + matter_client, + EventType.NODE_EVENT, + MatterNodeEvent( + node_id=generic_switch_multi_node.node_id, + endpoint_id=1, + cluster_id=59, + event_id=6, + event_number=0, + priority=1, + timestamp=0, + timestamp_type=0, + data={"totalNumberOfPressesCounted": 2}, + ), + ) + state = hass.states.get("event.mock_generic_switch_button_1") + assert state.attributes[ATTR_EVENT_TYPE] == "multi_press_2" diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 7e964d672ca..690209b1165 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -1,5 +1,6 @@ """Test Matter Fan platform.""" +from typing import Any from unittest.mock import MagicMock, call from matter_server.client.models.node import MatterNode @@ -27,6 +28,14 @@ from .common import ( ) +@pytest.fixture(name="fan_node") +async def simple_fan_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Fan node.""" + return await setup_integration_with_node_fixture(hass, "fan", matter_client) + + @pytest.fixture(name="air_purifier") async def air_purifier_fixture( hass: HomeAssistant, matter_client: MagicMock @@ -98,8 +107,14 @@ async def test_fan_base( state = hass.states.get(entity_id) assert state.attributes["preset_mode"] is None assert state.attributes["percentage"] == 0 + # test featuremap update + set_node_attribute(air_purifier, 1, 514, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.attributes["supported_features"] & FanEntityFeature.SET_SPEED +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_fan_turn_on_with_percentage( hass: HomeAssistant, matter_client: MagicMock, @@ -119,15 +134,31 @@ async def test_fan_turn_on_with_percentage( attribute_path="1/514/2", value=50, ) + # test again where preset_mode is omitted in the service call + # which should select the last active percentage + matter_client.write_attribute.reset_mock() + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args == call( + node_id=air_purifier.node_id, + attribute_path="1/514/2", + value=255, + ) +@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_fan_turn_on_with_preset_mode( hass: HomeAssistant, matter_client: MagicMock, - air_purifier: MatterNode, + fan_node: MatterNode, ) -> None: """Test turning on the fan with a specific preset mode.""" - entity_id = "fan.air_purifier_fan" + entity_id = "fan.mocked_fan_switch_fan" await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, @@ -136,7 +167,7 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/0", value=2, ) @@ -151,28 +182,13 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 1 assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + 
node_id=fan_node.node_id, attribute_path="1/514/10", value=value, ) - # test again where preset_mode is omitted in the service call - # which should select a default preset mode - matter_client.write_attribute.reset_mock() - await hass.services.async_call( - FAN_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - assert matter_client.write_attribute.call_count == 1 - assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, - attribute_path="1/514/0", - value=5, - ) # test again if wind mode is explicitly turned off when we set a new preset mode matter_client.write_attribute.reset_mock() - set_node_attribute(air_purifier, 1, 514, 10, 2) + set_node_attribute(fan_node, 1, 514, 10, 2) await trigger_subscription_callback(hass, matter_client) await hass.services.async_call( FAN_DOMAIN, @@ -182,15 +198,33 @@ async def test_fan_turn_on_with_preset_mode( ) assert matter_client.write_attribute.call_count == 2 assert matter_client.write_attribute.call_args_list[0] == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/10", value=0, ) assert matter_client.write_attribute.call_args == call( - node_id=air_purifier.node_id, + node_id=fan_node.node_id, attribute_path="1/514/0", value=2, ) + # test again where preset_mode is omitted in the service call + # which should select the last active preset + matter_client.write_attribute.reset_mock() + set_node_attribute(fan_node, 1, 514, 0, 1) + set_node_attribute(fan_node, 1, 514, 10, 0) + await trigger_subscription_callback(hass, matter_client) + await hass.services.async_call( + FAN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + assert matter_client.write_attribute.call_count == 1 + assert matter_client.write_attribute.call_args == call( + node_id=fan_node.node_id, + attribute_path="1/514/0", + value=1, + ) async def test_fan_turn_off( @@ -279,3 +313,133 @@ async def test_fan_set_direction( value=value, ) matter_client.write_attribute.reset_mock() + + +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize( + ("fixture", "entity_id", "attributes", "features"), + [ + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 0, + }, + (FanEntityFeature.TURN_ON | FanEntityFeature.TURN_OFF), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 1, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.SET_SPEED + ), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 4, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.OSCILLATE + ), + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + { + "1/514/65532": 36, + }, + ( + FanEntityFeature.TURN_ON + | FanEntityFeature.TURN_OFF + | FanEntityFeature.OSCILLATE + | FanEntityFeature.DIRECTION + ), + ), + ], +) +async def test_fan_supported_features( + hass: HomeAssistant, + matter_client: MagicMock, + fixture: str, + entity_id: str, + attributes: dict[str, Any], + features: int, +) -> None: + """Test if the correct features get discovered from featuremap.""" + await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) + state = hass.states.get(entity_id) + assert state + assert state.attributes["supported_features"] & features == features + + +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +@pytest.mark.parametrize( + ("fixture", "entity_id", "attributes", "preset_modes"), + [ + ( + "fan", + "fan.mocked_fan_switch_fan", + 
{"1/514/1": 0, "1/514/65532": 0}, + [ + "low", + "medium", + "high", + ], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 1, "1/514/65532": 0}, + [ + "low", + "high", + ], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 2, "1/514/65532": 0}, + ["low", "medium", "high", "auto"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 4, "1/514/65532": 0}, + ["high", "auto"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 5, "1/514/65532": 0}, + ["high"], + ), + ( + "fan", + "fan.mocked_fan_switch_fan", + {"1/514/1": 5, "1/514/65532": 8, "1/514/9": 3}, + ["high", "natural_wind", "sleep_wind"], + ), + ], +) +async def test_fan_features( + hass: HomeAssistant, + matter_client: MagicMock, + fixture: str, + entity_id: str, + attributes: dict[str, Any], + preset_modes: list[str], +) -> None: + """Test if the correct presets get discovered from fanmodesequence.""" + await setup_integration_with_node_fixture(hass, fixture, matter_client, attributes) + state = hass.states.get(entity_id) + assert state + assert state.attributes["preset_modes"] == preset_modes diff --git a/tests/components/matter/test_init.py b/tests/components/matter/test_init.py index c28385efca3..cd5ef307cd3 100644 --- a/tests/components/matter/test_init.py +++ b/tests/components/matter/test_init.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, call, patch from matter_server.client.exceptions import ( @@ -15,7 +16,6 @@ from matter_server.common.errors import MatterError from matter_server.common.helpers.util import dataclass_from_dict from matter_server.common.models import MatterNodeData import pytest -from typing_extensions import Generator from homeassistant.components.hassio import HassioAPIError from homeassistant.components.matter.const import DOMAIN diff --git a/tests/components/matter/test_lock.py b/tests/components/matter/test_lock.py index 1180e6ee469..f279430b393 100644 --- a/tests/components/matter/test_lock.py +++ b/tests/components/matter/test_lock.py @@ -97,6 +97,12 @@ async def test_lock( assert state assert state.state == STATE_UNKNOWN + # test featuremap update + set_node_attribute(door_lock, 1, 257, 65532, 4096) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("lock.mock_door_lock_lock") + assert state.attributes["supported_features"] & LockEntityFeature.OPEN + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py new file mode 100644 index 00000000000..f84e5870392 --- /dev/null +++ b/tests/components/matter/test_select.py @@ -0,0 +1,99 @@ +"""Test Matter select entities.""" + +from unittest.mock import MagicMock, call + +from chip.clusters import Objects as clusters +from matter_server.client.models.node import MatterNode +import pytest + +from homeassistant.core import HomeAssistant + +from .common import ( + set_node_attribute, + setup_integration_with_node_fixture, + trigger_subscription_callback, +) + + +@pytest.fixture(name="light_node") +async def dimmable_light_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a dimmable light node.""" + return await setup_integration_with_node_fixture( + hass, "dimmable-light", matter_client + ) + + +# This tests needs to be adjusted to remove lingering tasks 
+@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_mode_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + light_node: MatterNode, +) -> None: + """Test select entities are created for the ModeSelect cluster attributes.""" + state = hass.states.get("select.mock_dimmable_light_led_color") + assert state + assert state.state == "Aqua" + assert state.attributes["options"] == [ + "Red", + "Orange", + "Lemon", + "Lime", + "Green", + "Teal", + "Cyan", + "Aqua", + "Blue", + "Violet", + "Magenta", + "Pink", + "White", + ] + # name should be derived from description attribute + assert state.attributes["friendly_name"] == "Mock Dimmable Light LED Color" + set_node_attribute(light_node, 6, 80, 3, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("select.mock_dimmable_light_led_color") + assert state.state == "Orange" + # test select option + await hass.services.async_call( + "select", + "select_option", + { + "entity_id": "select.mock_dimmable_light_led_color", + "option": "Lime", + }, + blocking=True, + ) + + assert matter_client.send_device_command.call_count == 1 + assert matter_client.send_device_command.call_args == call( + node_id=light_node.node_id, + endpoint_id=6, + command=clusters.ModeSelect.Commands.ChangeToMode(newMode=3), + ) + + +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_attribute_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + light_node: MatterNode, +) -> None: + """Test select entities are created for attribute based discovery schema(s).""" + entity_id = "select.mock_dimmable_light_power_on_behavior_on_startup" + state = hass.states.get(entity_id) + assert state + assert state.state == "Previous" + assert state.attributes["options"] == ["On", "Off", "Toggle", "Previous"] + assert ( + state.attributes["friendly_name"] + == "Mock Dimmable Light Power-on behavior on Startup" + ) + set_node_attribute(light_node, 1, 6, 16387, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.state == "On" diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py new file mode 100644 index 00000000000..73c69407bbc --- /dev/null +++ b/tests/components/matter/test_update.py @@ -0,0 +1,171 @@ +"""Test Matter number entities.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +from chip.clusters import Objects as clusters +from chip.clusters.ClusterObjects import ClusterAttributeDescriptor +from matter_server.client.models.node import MatterNode +from matter_server.common.models import MatterSoftwareVersion, UpdateSource +import pytest + +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .common import ( + set_node_attribute, + setup_integration_with_node_fixture, + trigger_subscription_callback, +) + + +def set_node_attribute_typed( + node: MatterNode, + endpoint: int, + attribute: ClusterAttributeDescriptor, + value: Any, +) -> None: + """Set a node attribute.""" + set_node_attribute( + node, endpoint, attribute.cluster_id, attribute.attribute_id, value + ) + + +@pytest.fixture(name="check_node_update") +async def check_node_update_fixture(matter_client: MagicMock) -> AsyncMock: + """Fixture for a flow sensor node.""" + matter_client.check_node_update = 
AsyncMock(return_value=None) + return matter_client.check_node_update + + +@pytest.fixture(name="updateable_node") +async def updateable_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a flow sensor node.""" + return await setup_integration_with_node_fixture( + hass, "dimmable-light", matter_client + ) + + +async def test_update_entity( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, +) -> None: + """Test update entity exists and update check got made.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + + assert matter_client.check_node_update.call_count == 1 + + +async def test_update_install( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, +) -> None: + """Test update entity exists and update check got made.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + await async_setup_component(hass, "homeassistant", {}) + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + await hass.services.async_call( + "homeassistant", + "update_entity", + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) + + await async_setup_component(hass, "update", {}) + + await hass.services.async_call( + "update", + "install", + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, + clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kDownloading, + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("in_progress") + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateStateProgress, + 50, + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("in_progress") == 50 + + set_node_attribute_typed( + updateable_node, + 0, + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState, + clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum.kIdle, + ) + set_node_attribute_typed( + updateable_node, + 0, + clusters.BasicInformation.Attributes.SoftwareVersion, + 2, + ) + set_node_attribute_typed( + updateable_node, + 0, + clusters.BasicInformation.Attributes.SoftwareVersionString, + "v2.0", + ) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("update.mock_dimmable_light") + assert state.state == STATE_OFF + 
assert state.attributes.get("installed_version") == "v2.0" diff --git a/tests/components/mealie/conftest.py b/tests/components/mealie/conftest.py index 9bda9e3c46d..ba42d16e56e 100644 --- a/tests/components/mealie/conftest.py +++ b/tests/components/mealie/conftest.py @@ -1,11 +1,20 @@ """Mealie tests configuration.""" +from collections.abc import Generator from unittest.mock import patch -from aiomealie import Mealplan, MealplanResponse, UserInfo +from aiomealie import ( + About, + Mealplan, + MealplanResponse, + Recipe, + ShoppingItemsResponse, + ShoppingListsResponse, + Statistics, + UserInfo, +) from mashumaro.codecs.orjson import ORJSONDecoder import pytest -from typing_extensions import Generator from homeassistant.components.mealie.const import DOMAIN from homeassistant.const import CONF_API_TOKEN, CONF_HOST @@ -13,6 +22,9 @@ from homeassistant.const import CONF_API_TOKEN, CONF_HOST from tests.common import MockConfigEntry, load_fixture from tests.components.smhi.common import AsyncMock +SHOPPING_LIST_ID = "list-id-1" +SHOPPING_ITEM_NOTE = "Shopping Item 1" + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -29,7 +41,7 @@ def mock_mealie_client() -> Generator[AsyncMock]: """Mock a Mealie client.""" with ( patch( - "homeassistant.components.mealie.coordinator.MealieClient", + "homeassistant.components.mealie.MealieClient", autospec=True, ) as mock_client, patch( @@ -47,6 +59,24 @@ def mock_mealie_client() -> Generator[AsyncMock]: client.get_user_info.return_value = UserInfo.from_json( load_fixture("users_self.json", DOMAIN) ) + client.get_about.return_value = About.from_json( + load_fixture("about.json", DOMAIN) + ) + recipe = Recipe.from_json(load_fixture("get_recipe.json", DOMAIN)) + client.get_recipe.return_value = recipe + client.import_recipe.return_value = recipe + client.get_shopping_lists.return_value = ShoppingListsResponse.from_json( + load_fixture("get_shopping_lists.json", DOMAIN) + ) + client.get_shopping_items.return_value = ShoppingItemsResponse.from_json( + load_fixture("get_shopping_items.json", DOMAIN) + ) + client.get_statistics.return_value = Statistics.from_json( + load_fixture("statistics.json", DOMAIN) + ) + mealplan = Mealplan.from_json(load_fixture("mealplan.json", DOMAIN)) + client.random_mealplan.return_value = mealplan + client.set_mealplan.return_value = mealplan yield client diff --git a/tests/components/mealie/fixtures/about.json b/tests/components/mealie/fixtures/about.json new file mode 100644 index 00000000000..86f74ec66d6 --- /dev/null +++ b/tests/components/mealie/fixtures/about.json @@ -0,0 +1,3 @@ +{ + "version": "v1.10.2" +} diff --git a/tests/components/mealie/fixtures/get_recipe.json b/tests/components/mealie/fixtures/get_recipe.json new file mode 100644 index 00000000000..a5ccd1876e5 --- /dev/null +++ b/tests/components/mealie/fixtures/get_recipe.json @@ -0,0 +1,266 @@ +{ + "id": "fada9582-709b-46aa-b384-d5952123ad93", + "userId": "bf1c62fe-4941-4332-9886-e54e88dbdba0", + "groupId": "24477569-f6af-4b53-9e3f-6d04b0ca6916", + "name": "Original Sacher-Torte (2)", + "slug": "original-sacher-torte-2", + "image": "SuPW", + "recipeYield": "4 servings", + "totalTime": "2 hours 30 minutes", + "prepTime": "1 hour 30 minutes", + "cookTime": null, + "performTime": "1 hour", + "description": "The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. 
The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”", + "recipeCategory": [], + "tags": [ + { + "id": "1b5789b9-3af6-412e-8c77-8a01caa0aac9", + "name": "Sacher", + "slug": "sacher" + }, + { + "id": "1cf17f96-58b5-4bd3-b1e8-1606a64b413d", + "name": "Cake", + "slug": "cake" + }, + { + "id": "3f5f0a3d-728f-440d-a6c7-5a68612e8c67", + "name": "Torte", + "slug": "torte" + }, + { + "id": "525f388d-6ee0-4ebe-91fc-dd320a7583f0", + "name": "Sachertorte", + "slug": "sachertorte" + }, + { + "id": "544a6e08-a899-4f63-9c72-bb2924df70cb", + "name": "Sacher Torte Cake", + "slug": "sacher-torte-cake" + }, + { + "id": "576c0a82-84ee-4e50-a14e-aa7a675b6352", + "name": "Sacher Torte", + "slug": "sacher-torte" + }, + { + "id": "d530b8e4-275a-4093-804b-6d0de154c206", + "name": "Original Sachertorte", + "slug": "original-sachertorte" + } + ], + "tools": [], + "rating": null, + "orgURL": "https://www.sacher.com/en/original-sacher-torte/recipe/", + "dateAdded": "2024-06-29", + "dateUpdated": "2024-06-29T06:10:34.412665", + "createdAt": "2024-06-29T06:10:34.414927", + "updateAt": "2024-06-29T06:10:34.414928", + "lastMade": null, + "recipeIngredient": [ + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "130g dark couverture chocolate (min. 55% cocoa content)", + "isFood": true, + "disableAmount": false, + "display": "1 130g dark couverture chocolate (min. 55% cocoa content)", + "title": null, + "originalText": null, + "referenceId": "a3adfe78-d157-44d8-98be-9c133e45bb4e" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "1 Vanilla Pod", + "isFood": true, + "disableAmount": false, + "display": "1 1 Vanilla Pod", + "title": null, + "originalText": null, + "referenceId": "41d234d7-c040-48f9-91e6-f4636aebb77b" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "150g softened butter", + "isFood": true, + "disableAmount": false, + "display": "1 150g softened butter", + "title": null, + "originalText": null, + "referenceId": "f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "100g Icing sugar", + "isFood": true, + "disableAmount": false, + "display": "1 100g Icing sugar", + "title": null, + "originalText": null, + "referenceId": "f7fcd86e-b04b-4e07-b69c-513925811491" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "6 Eggs", + "isFood": true, + "disableAmount": false, + "display": "1 6 Eggs", + "title": null, + "originalText": null, + "referenceId": "a831fbc3-e2f5-452e-a745-450be8b4a130" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "100g Castor sugar", + "isFood": true, + "disableAmount": false, + "display": "1 100g Castor sugar", + "title": null, + "originalText": null, + "referenceId": "b5ee4bdc-0047-4de7-968b-f3360bbcb31e" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "140g Plain wheat flour", + "isFood": true, + "disableAmount": false, + "display": "1 140g Plain wheat flour", + "title": null, + "originalText": null, + "referenceId": "a67db09d-429c-4e77-919d-cfed3da675ad" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "200g apricot 
jam", + "isFood": true, + "disableAmount": false, + "display": "1 200g apricot jam", + "title": null, + "originalText": null, + "referenceId": "55479752-c062-4b25-aae3-2b210999d7b9" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "200g castor sugar", + "isFood": true, + "disableAmount": false, + "display": "1 200g castor sugar", + "title": null, + "originalText": null, + "referenceId": "ff9cd404-24ec-4d38-b0aa-0120ce1df679" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "150g dark couverture chocolate (min. 55% cocoa content)", + "isFood": true, + "disableAmount": false, + "display": "1 150g dark couverture chocolate (min. 55% cocoa content)", + "title": null, + "originalText": null, + "referenceId": "c7fca92e-971e-4728-a227-8b04783583ed" + }, + { + "quantity": 1.0, + "unit": null, + "food": null, + "note": "Unsweetend whipped cream to garnish", + "isFood": true, + "disableAmount": false, + "display": "1 Unsweetend whipped cream to garnish", + "title": null, + "originalText": null, + "referenceId": "ef023f23-7816-4871-87f6-4d29f9a283f7" + } + ], + "recipeInstructions": [ + { + "id": "2d558dbf-5361-4ef2-9d86-4161f5eb6146", + "title": "", + "text": "Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.", + "ingredientReferences": [] + }, + { + "id": "dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a", + "title": "", + "text": "Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.", + "ingredientReferences": [] + }, + { + "id": "2265bd14-a691-40b1-9fe6-7b5dfeac8401", + "title": "", + "text": "Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.", + "ingredientReferences": [] + }, + { + "id": "0aade447-dfac-4aae-8e67-ac250ad13ae2", + "title": "", + "text": "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", + "ingredientReferences": [] + }, + { + "id": "5fdcb703-7103-468d-a65d-a92460b92eb3", + "title": "", + "text": "Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.", + "ingredientReferences": [] + }, + { + "id": "81474afc-b44e-49b3-bb67-5d7dab8f832a", + "title": "", + "text": "Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.", + "ingredientReferences": [] + }, + { + "id": "8fac8aee-0d3c-4f78-9ff8-56d20472e5f1", + "title": "", + "text": "To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. 
Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).", + "ingredientReferences": [] + }, + { + "id": "7162e099-d651-4656-902a-a09a9b40c4e1", + "title": "", + "text": "Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. Serve garnished with whipped cream.", + "ingredientReferences": [] + } + ], + "nutrition": { + "calories": "400", + "fatContent": "17", + "proteinContent": null, + "carbohydrateContent": null, + "fiberContent": null, + "sodiumContent": null, + "sugarContent": null + }, + "settings": { + "public": true, + "showNutrition": true, + "showAssets": true, + "landscapeView": false, + "disableComments": false, + "disableAmount": false, + "locked": false + }, + "assets": [], + "notes": [], + "extras": {}, + "comments": [] +} diff --git a/tests/components/mealie/fixtures/get_shopping_items.json b/tests/components/mealie/fixtures/get_shopping_items.json new file mode 100644 index 00000000000..1016440816b --- /dev/null +++ b/tests/components/mealie/fixtures/get_shopping_items.json @@ -0,0 +1,108 @@ +{ + "page": 1, + "per_page": 1000, + "total": 3, + "total_pages": 1, + "items": [ + { + "quantity": 2.0, + "unit": null, + "food": null, + "note": "Apples", + "isFood": false, + "disableAmount": true, + "display": "2 Apples", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 0, + "foodId": null, + "labelId": null, + "unitId": null, + "extras": {}, + "id": "f45430f7-3edf-45a9-a50f-73bb375090be", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T10:45:03.362623", + "updateAt": "2024-06-25T11:57:22.412650" + }, + { + "quantity": 1.0, + "unit": { + "id": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", + "name": "can", + "pluralName": null, + "description": "", + "extras": {}, + "fraction": true, + "abbreviation": "", + "pluralAbbreviation": "", + "useAbbreviation": false, + "aliases": [], + "createdAt": "2024-05-14T14:45:02.464122", + "updateAt": "2024-05-14T14:45:02.464124" + }, + "food": { + "id": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", + "name": "acorn squash", + "pluralName": null, + "description": "", + "extras": {}, + "labelId": null, + "aliases": [], + "label": null, + "createdAt": "2024-05-14T14:45:04.454134", + "updateAt": "2024-05-14T14:45:04.454141" + }, + "note": "", + "isFood": true, + "disableAmount": false, + "display": "1 can acorn squash", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 1, + "foodId": "09322430-d24c-4b1a-abb6-22b6ed3a88f5", + "labelId": null, + "unitId": "7bf539d4-fc78-48bc-b48e-c35ccccec34a", + "extras": {}, + "id": "84d8fd74-8eb0-402e-84b6-71f251bfb7cc", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T10:45:14.547922", + "updateAt": "2024-06-25T10:45:14.547925" + }, + { + "quantity": 0.0, + "unit": null, + "food": { + "id": "96801494-4e26-4148-849a-8155deb76327", + "name": "aubergine", + "pluralName": null, + "description": "", + "extras": {}, + "labelId": null, + "aliases": [], + "label": null, + "createdAt": "2024-05-14T14:45:03.868792", + "updateAt": "2024-05-14T14:45:03.868794" + }, + "note": "", + "isFood": true, + "disableAmount": false, + "display": "aubergine", + "shoppingListId": "9ce096fe-ded2-4077-877d-78ba450ab13e", + "checked": false, + "position": 2, + "foodId": "96801494-4e26-4148-849a-8155deb76327", + "labelId": null, + "unitId": null, + "extras": {}, + "id": 
"69913b9a-7c75-4935-abec-297cf7483f88", + "label": null, + "recipeReferences": [], + "createdAt": "2024-06-25T11:56:59.656699", + "updateAt": "2024-06-25T11:56:59.656701" + } + ], + "next": null, + "previous": null +} diff --git a/tests/components/mealie/fixtures/get_shopping_lists.json b/tests/components/mealie/fixtures/get_shopping_lists.json new file mode 100644 index 00000000000..7b7ba0aaa7a --- /dev/null +++ b/tests/components/mealie/fixtures/get_shopping_lists.json @@ -0,0 +1,838 @@ +{ + "page": 1, + "per_page": 50, + "total": 3, + "total_pages": 1, + "items": [ + { + "name": "Supermarket", + "extras": {}, + "createdAt": "2024-06-17T11:01:54.267314", + "updateAt": "2024-06-22T10:22:13.555389", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": "27edbaab-2ec6-441f-8490-0283ea77585f", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "ad5f48b0-5b26-4c2d-a2aa-79b0beae1e42", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "c9b8289a-6693-4bec-9841-d7d08c3b240b", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "9be06f8a-6c23-476b-a8cc-334884bcdd40", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "47bc36ae-1ee4-40be-ad68-ad8662c26cae", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "ad41f42c-08c3-49ef-8b96-dc1740ec95b6", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": "5514842f-8c05-4003-a42d-7a5a70d80148", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "0465a139-6571-4599-836b-a562afc95536", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "8d85fe1b-ec4d-49d0-aecc-15f9dbc66fd0", + 
"label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "b6980720-bd88-4703-a115-50c0b915f607", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "5d69d13c-5d7f-45af-9ecc-045ca914f7ca", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "a5e65ce7-3588-412b-a118-2fe1a2ca0104", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "9890d86a-98e9-4599-8daf-82d341ef1e8d", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + "position": 12, + "id": "18fc0f39-3e45-412f-afa7-7eb779f7bfdf", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "4cd55de7-7c2e-4078-8c61-87d40b33ebda", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "21c55b4a-c1b1-44c0-962e-040bbfa5e148", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "b295a6be-1437-4415-92bb-4eee21d3195d", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": "d3ae533f-c1a8-4f08-8a0f-a88914b2c84b", + "label": { + "name": "Regular", + "color": "#2E7D32FF", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "572dbf60-4308-499e-ad7c-d806462ee501", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + 
"groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "5321b4d8-3aba-4a64-95b2-03ac533dda32", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "98aebebf-27fe-4834-b3d3-0e45201a182f", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "3e3aa706-3008-4280-b332-a7d2c31cf683", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "27edbaab-2ec6-441f-8490-0283ea77585f", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "48f109ca-c57a-4828-98ab-a2db1e6514c6", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + }, + { + "name": "Special groceries", + "extras": {}, + "createdAt": "2024-06-07T07:17:05.479808", + "updateAt": "2024-06-12T08:44:58.831239", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "1a5dc45b-e6ae-4db2-bd2f-fa3c07efedeb", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "d1594c9d-f1b6-4160-a4eb-0686499a40ea", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "077106d0-5c85-493c-ae6b-dea06002c824", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "bf66b7e8-3758-4f9e-9e13-c7b9ff564889", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "bb34f741-10b4-490a-a512-67bbd374427c", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": 
"24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": "d88b23a5-e397-4cf2-b527-d8982ecf89e0", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "82d44804-5bef-4cc3-9d1f-0d8e879783c0", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "0ae70dde-7403-408f-a6c6-c19b8c0f6a4d", + "label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "7667a581-8d63-4785-a013-8e164994dfc4", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "749c8cbd-c4e5-4879-bce1-40c3b62ada71", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "e7979797-7679-47be-b14f-5fdcfe1c987d", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "1a9b6d19-d8b5-41a0-8e75-548c36fc0b1b", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + "position": 12, + "id": "0df24ff7-1767-46a1-9841-97f816079580", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "761b5985-9f49-450b-a33c-5b85366501da", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "cd993b6c-2c06-40b3-8fe2-8f9613d29b8e", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + 
"shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "9c9f8e0d-a9e8-4503-ad98-ee7039ec6eec", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": "f2a1fa92-1ee3-47b5-9d5f-1ac21e0d6bf3", + "label": { + "name": "Regular", + "color": "#2E7D32FF", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "bf2eb5db-bf88-44bc-a83f-7c69c38fc03f", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "14f5ca34-fcec-4847-8ee7-71b29488dc5b", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "197f3d41-27a6-4782-a78d-60ea582108c8", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "b5021331-2004-4570-a2bb-c6f364787bcc", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "f8438635-8211-4be8-80d0-0aa42e37a5f2", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "98e9ecff-d650-4717-96fe-d7744258bf43", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + }, + { + "name": "Freezer", + "extras": {}, + "createdAt": "2024-06-05T09:49:00.404632", + "updateAt": "2024-06-23T08:21:51.764793", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "userId": "90b03954-00e1-46de-9520-f0305022b84f", + "id": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "recipeReferences": [], + "labelSettings": [ + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "0f63545a-606a-47ea-a784-452d45de6158", + "position": 0, + "id": "666b5b98-dcf6-4121-a5a6-2782f06f5f7e", + "label": { + "name": "Alcohol", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0f63545a-606a-47ea-a784-452d45de6158" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "0c2d6111-9837-4319-acb5-490a32979993", + "position": 1, + "id": "6d25fc7e-33d2-459c-ba14-7e0aaf30a522", + "label": { + "name": "Baked Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "0c2d6111-9837-4319-acb5-490a32979993" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": 
"3922802c-8e8c-47d4-9c68-e60b0a1338b6", + "position": 2, + "id": "56402a4e-c94e-4480-9f68-87370dbda209", + "label": { + "name": "Beverages", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3922802c-8e8c-47d4-9c68-e60b0a1338b6" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "4111bfff-d834-4e8c-88ed-5eff761e06db", + "position": 3, + "id": "743e9e2b-a13a-4d80-b203-431d1c23f691", + "label": { + "name": "Canned Goods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "4111bfff-d834-4e8c-88ed-5eff761e06db" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "24fa2836-25e8-44af-b497-ad0d428a7f78", + "position": 4, + "id": "93b46c6e-0542-4adf-ad9d-8942b47dd9e3", + "label": { + "name": "Condiments", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "24fa2836-25e8-44af-b497-ad0d428a7f78" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "20a735de-c75b-4fdb-abaf-b8d71ef192f8", + "position": 5, + "id": "8c6f20ff-a5e3-4c64-a1ff-aa07bbdd455a", + "label": { + "name": "Confectionary", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "20a735de-c75b-4fdb-abaf-b8d71ef192f8" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "71178428-70aa-4491-b5b4-b8d93e7b04cf", + "position": 6, + "id": "02995d80-108f-4949-bd58-d04d670b388d", + "label": { + "name": "Dairy Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "71178428-70aa-4491-b5b4-b8d93e7b04cf" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "c58ed864-b5bf-4aac-88a1-007833c706c7", + "position": 7, + "id": "b20c178c-e719-4159-b199-91a6dd25dcd3", + "label": { + "name": "Frozen Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c58ed864-b5bf-4aac-88a1-007833c706c7" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "f398f1a4-ce53-42df-95d4-8a3403bb6a38", + "position": 8, + "id": "5ff12e47-9b84-46d2-aabf-da4165a68f65", + "label": { + "name": "Fruits", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "f398f1a4-ce53-42df-95d4-8a3403bb6a38" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "fd936065-3d53-4844-99df-9332f1bf0c8a", + "position": 9, + "id": "e0ec7da9-c0b8-4d78-a5b8-591c99d87370", + "label": { + "name": "Grains", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "fd936065-3d53-4844-99df-9332f1bf0c8a" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf", + "position": 10, + "id": "3dc2d2e7-274e-40ec-8ba1-09ce1820b29b", + "label": { + "name": "Health Foods", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "db7b685a-4aeb-4ebd-9b64-0c14827d9eaf" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b", + "position": 11, + "id": "e30fa937-4bb1-4ff9-b163-2da67e2749ca", + "label": { + "name": "Household", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "28bbdab4-7eab-4fb2-b0e1-b0f2c10e489b" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "cf136576-1929-4fc9-a3da-34c49ff58920", + 
"position": 12, + "id": "ecd715af-fafe-4d32-a376-538e476bf215", + "label": { + "name": "Meat", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf136576-1929-4fc9-a3da-34c49ff58920" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa", + "position": 13, + "id": "5ded867c-473f-456d-b0a0-83cae279df71", + "label": { + "name": "Meat Products", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "5b7d69d0-4d9f-48f9-96f1-8cb843227baa" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "2a035661-fd5d-462c-8eb0-6b78af982e0c", + "position": 14, + "id": "eb88d477-cd50-4b84-a1bb-5adc077d38e5", + "label": { + "name": "Other", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "2a035661-fd5d-462c-8eb0-6b78af982e0c" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "af147838-d114-4a92-bd0f-08f05f59bbe5", + "position": 15, + "id": "ab7e96e3-f8d5-4e4e-91ee-b966bd980cf0", + "label": { + "name": "Produce", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "af147838-d114-4a92-bd0f-08f05f59bbe5" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "cf7672b8-036a-45a4-8323-6a167d2731be", + "position": 16, + "id": "3fcf5e5a-f8e2-4174-be79-2496a1cb505a", + "label": { + "name": "Regular", + "color": "#2E7D32FF", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "cf7672b8-036a-45a4-8323-6a167d2731be" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18", + "position": 17, + "id": "e768c9e7-c568-44d1-a263-081d93fd1298", + "label": { + "name": "Seafood", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "bbcfaf8b-02e6-4c3d-98a6-6863b36bef18" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "1c59a263-227a-4f43-a450-d53ca1485b36", + "position": 18, + "id": "f8a78147-c6d1-4a86-b159-5f178ae72089", + "label": { + "name": "Snacks", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "1c59a263-227a-4f43-a450-d53ca1485b36" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "189099a9-0033-4783-804a-ec6805e7d557", + "position": 19, + "id": "23253f2f-bc71-4ecf-837c-d1697738b505", + "label": { + "name": "Spices", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "189099a9-0033-4783-804a-ec6805e7d557" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "c28efdde-5993-4044-b824-f111f3a118ef", + "position": 20, + "id": "706d656b-3755-46f7-8c12-c9196730baf2", + "label": { + "name": "Sweets", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "c28efdde-5993-4044-b824-f111f3a118ef" + } + }, + { + "shoppingListId": "e9d78ff2-4b23-4b77-a3a8-464827100b46", + "labelId": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c", + "position": 21, + "id": "d9d60d8d-f2de-4636-864f-d7262e24ead3", + "label": { + "name": "Vegetables", + "color": "#E0E0E0", + "groupId": "9ed7c880-3e85-4955-9318-1443d6e080fe", + "id": "3f151d15-27f9-41c7-9dfc-2ae1024b1c7c" + } + } + ] + } + ], + "next": null, + "previous": null +} diff --git a/tests/components/mealie/fixtures/mealplan.json b/tests/components/mealie/fixtures/mealplan.json new file mode 100644 index 
00000000000..b540280d83f --- /dev/null +++ b/tests/components/mealie/fixtures/mealplan.json @@ -0,0 +1,34 @@ +{ + "date": "2024-01-22", + "entryType": "dinner", + "title": "", + "text": "", + "recipeId": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", + "id": 230, + "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", + "userId": "1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", + "recipe": { + "id": "c5f00a93-71a2-4e48-900f-d9ad0bb9de93", + "userId": "1ce8b5fe-04e8-4b80-aab1-d92c94685c6d", + "groupId": "0bf60b2e-ca89-42a9-94d4-8f67ca72b157", + "name": "Zoete aardappel curry traybake", + "slug": "zoete-aardappel-curry-traybake", + "image": "AiIo", + "recipeYield": "2 servings", + "totalTime": "40 Minutes", + "prepTime": null, + "cookTime": null, + "performTime": null, + "description": "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + "recipeCategory": [], + "tags": [], + "tools": [], + "rating": null, + "orgURL": "https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/", + "dateAdded": "2024-01-22", + "dateUpdated": "2024-01-22T00:27:46.324512", + "createdAt": "2024-01-22T00:27:46.327546", + "updateAt": "2024-01-22T00:27:46.327548", + "lastMade": null + } +} diff --git a/tests/components/mealie/fixtures/statistics.json b/tests/components/mealie/fixtures/statistics.json new file mode 100644 index 00000000000..350bf1fd9ff --- /dev/null +++ b/tests/components/mealie/fixtures/statistics.json @@ -0,0 +1,7 @@ +{ + "totalRecipes": 765, + "totalUsers": 3, + "totalCategories": 24, + "totalTags": 454, + "totalTools": 11 +} diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e6c72c950cc --- /dev/null +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -0,0 +1,505 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'about': dict({ + 'version': 'v1.10.2', + }), + 'mealplans': dict({ + 'breakfast': list([ + dict({ + 'description': None, + 'entry_type': 'breakfast', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 229, + 'recipe': dict({ + 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! 
Served with roasted vegetables, this recipe is simple enough for any cook!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'JeQ2', + 'name': 'Roast Chicken', + 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', + 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', + 'recipe_yield': '6 servings', + 'slug': 'roast-chicken', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'dinner': list([ + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 222, + 'recipe': dict({ + 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'En9o', + 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', + 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', + 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', + 'recipe_yield': '6 servings', + 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 221, + 'recipe': dict({ + 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'Kn62', + 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', + 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', + 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', + 'recipe_yield': '4 servings', + 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 219, + 'recipe': dict({ + 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'ibL6', + 'name': 'Pampered Chef Double Chocolate Mocha Trifle', + 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', + 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', + 'recipe_yield': '12 servings', + 'slug': 'pampered-chef-double-chocolate-mocha-trifle', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 217, + 'recipe': dict({ + 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'beGq', + 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', + 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', + 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', + 'recipe_yield': '24 servings', + 'slug': 'cheeseburger-sliders-easy-30-min-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 212, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 211, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 196, + 'recipe': dict({ + 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '5G1v', + 'name': 'Miso Udon Noodles with Spinach and Tofu', + 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', + 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', + 'recipe_yield': '2 servings', + 'slug': 'miso-udon-noodles-with-spinach-and-tofu', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'dinner', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 195, + 'recipe': dict({ + 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. 
On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'rrNL', + 'name': 'Mousse de saumon', + 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', + 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', + 'recipe_yield': '12 servings', + 'slug': 'mousse-de-saumon', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': 'Dineren met de boys', + 'entry_type': 'dinner', + 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-21', + }), + 'mealplan_id': 1, + 'recipe': None, + 'title': 'Aquavite', + 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', + }), + ]), + 'lunch': list([ + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 226, + 'recipe': dict({ + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'INQz', + 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', + 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', + 'recipe_yield': '2 servings', + 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 224, + 'recipe': dict({ + 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nj5M', + 'name': 'Boeuf bourguignon : la vraie recette (2)', + 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', + 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', + 'recipe_yield': '4 servings', + 'slug': 'boeuf-bourguignon-la-vraie-recette-2', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': 'lunch', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-22', + }), + 'mealplan_id': 216, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 
'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + 'side': list([ + dict({ + 'description': None, + 'entry_type': 'side', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': dict({ + '__type': "", + 'isoformat': '2024-01-23', + }), + 'mealplan_id': 220, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + ]), + }), + 'shoppinglist': dict({ + '27edbaab-2ec6-441f-8490-0283ea77585f': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': '27edbaab-2ec6-441f-8490-0283ea77585f', + 'name': 'Supermarket', + }), + }), + 'e9d78ff2-4b23-4b77-a3a8-464827100b46': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'e9d78ff2-4b23-4b77-a3a8-464827100b46', + 
'name': 'Freezer', + }), + }), + 'f8438635-8211-4be8-80d0-0aa42e37a5f2': dict({ + 'items': list([ + dict({ + 'checked': False, + 'disable_amount': True, + 'display': '2 Apples', + 'food_id': None, + 'is_food': False, + 'item_id': 'f45430f7-3edf-45a9-a50f-73bb375090be', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': 'Apples', + 'position': 0, + 'quantity': 2.0, + 'unit_id': None, + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': '1 can acorn squash', + 'food_id': '09322430-d24c-4b1a-abb6-22b6ed3a88f5', + 'is_food': True, + 'item_id': '84d8fd74-8eb0-402e-84b6-71f251bfb7cc', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 1, + 'quantity': 1.0, + 'unit_id': '7bf539d4-fc78-48bc-b48e-c35ccccec34a', + }), + dict({ + 'checked': False, + 'disable_amount': False, + 'display': 'aubergine', + 'food_id': '96801494-4e26-4148-849a-8155deb76327', + 'is_food': True, + 'item_id': '69913b9a-7c75-4935-abec-297cf7483f88', + 'label_id': None, + 'list_id': '9ce096fe-ded2-4077-877d-78ba450ab13e', + 'note': '', + 'position': 2, + 'quantity': 0.0, + 'unit_id': None, + }), + ]), + 'shopping_list': dict({ + 'list_id': 'f8438635-8211-4be8-80d0-0aa42e37a5f2', + 'name': 'Special groceries', + }), + }), + }), + }) +# --- diff --git a/tests/components/mealie/snapshots/test_init.ambr b/tests/components/mealie/snapshots/test_init.ambr index 8f800676945..98ca52dd15e 100644 --- a/tests/components/mealie/snapshots/test_init.ambr +++ b/tests/components/mealie/snapshots/test_init.ambr @@ -21,12 +21,13 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Mealie', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': None, + 'sw_version': 'v1.10.2', 'via_device_id': None, }) # --- diff --git a/tests/components/mealie/snapshots/test_sensor.ambr b/tests/components/mealie/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..e645cf4c45f --- /dev/null +++ b/tests/components/mealie/snapshots/test_sensor.ambr @@ -0,0 +1,251 @@ +# serializer version: 1 +# name: test_entities[sensor.mealie_categories-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_categories', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Categories', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'categories', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_categories', + 'unit_of_measurement': 'categories', + }) +# --- +# name: test_entities[sensor.mealie_categories-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Categories', + 'state_class': , + 'unit_of_measurement': 'categories', + }), + 'context': , + 'entity_id': 'sensor.mealie_categories', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- +# name: test_entities[sensor.mealie_recipes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_recipes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Recipes', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'recipes', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_recipes', + 'unit_of_measurement': 'recipes', + }) +# --- +# name: test_entities[sensor.mealie_recipes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Recipes', + 'state_class': , + 'unit_of_measurement': 'recipes', + }), + 'context': , + 'entity_id': 'sensor.mealie_recipes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '765', + }) +# --- +# name: test_entities[sensor.mealie_tags-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_tags', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tags', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tags', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tags', + 'unit_of_measurement': 'tags', + }) +# --- +# name: test_entities[sensor.mealie_tags-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Tags', + 'state_class': , + 'unit_of_measurement': 'tags', + }), + 'context': , + 'entity_id': 'sensor.mealie_tags', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '454', + }) +# --- +# name: test_entities[sensor.mealie_tools-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_tools', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Tools', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'tools', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_tools', + 'unit_of_measurement': 'tools', + }) +# --- +# name: test_entities[sensor.mealie_tools-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Tools', + 'state_class': , + 'unit_of_measurement': 'tools', + }), + 'context': , + 'entity_id': 'sensor.mealie_tools', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '11', + }) +# --- +# name: test_entities[sensor.mealie_users-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mealie_users', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Users', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'users', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_users', + 'unit_of_measurement': 'users', + }) +# --- +# name: test_entities[sensor.mealie_users-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Users', + 'state_class': , + 'unit_of_measurement': 'users', + }), + 'context': , + 'entity_id': 'sensor.mealie_users', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr new file mode 100644 index 00000000000..3ae158f1d2d --- /dev/null +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -0,0 +1,749 @@ +# serializer version: 1 +# name: test_service_import_recipe + dict({ + 'recipe': dict({ + 'date_added': datetime.date(2024, 6, 29), + 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', + 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'image': 'SuPW', + 'ingredients': list([ + dict({ + 'is_food': True, + 'note': '130g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '1 Vanilla Pod', + 'quantity': 1.0, + 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g softened butter', + 'quantity': 1.0, + 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Icing sugar', + 'quantity': 1.0, + 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '6 Eggs', + 'quantity': 1.0, + 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Castor sugar', + 'quantity': 1.0, + 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '140g Plain wheat flour', + 'quantity': 1.0, + 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g apricot jam', + 'quantity': 1.0, + 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g castor sugar', + 'quantity': 1.0, + 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g dark couverture chocolate (min. 
55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': 'Unsweetend whipped cream to garnish', + 'quantity': 1.0, + 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', + 'unit': None, + }), + ]), + 'instructions': list([ + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', + 'text': 'Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', + 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', + 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', + 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. (The cake is done when it yields slightly to the touch.)", + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', + 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', + 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', + 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', + 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. 
Serve garnished with whipped cream.', + 'title': None, + }), + ]), + 'name': 'Original Sacher-Torte (2)', + 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', + 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', + 'recipe_yield': '4 servings', + 'slug': 'original-sacher-torte-2', + 'tags': list([ + dict({ + 'name': 'Sacher', + 'slug': 'sacher', + 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', + }), + dict({ + 'name': 'Cake', + 'slug': 'cake', + 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', + }), + dict({ + 'name': 'Torte', + 'slug': 'torte', + 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', + }), + dict({ + 'name': 'Sachertorte', + 'slug': 'sachertorte', + 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', + }), + dict({ + 'name': 'Sacher Torte Cake', + 'slug': 'sacher-torte-cake', + 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', + }), + dict({ + 'name': 'Sacher Torte', + 'slug': 'sacher-torte', + 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', + }), + dict({ + 'name': 'Original Sachertorte', + 'slug': 'original-sachertorte', + 'tag_id': 'd530b8e4-275a-4093-804b-6d0de154c206', + }), + ]), + 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', + }), + }) +# --- +# name: test_service_mealplan + dict({ + 'mealplan': list([ + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 229, + 'recipe': dict({ + 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! 
Served with roasted vegetables, this recipe is simple enough for any cook!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'JeQ2', + 'name': 'Roast Chicken', + 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', + 'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1', + 'recipe_yield': '6 servings', + 'slug': 'roast-chicken', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 226, + 'recipe': dict({ + 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'INQz', + 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', + 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', + 'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9', + 'recipe_yield': '2 servings', + 'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 224, + 'recipe': dict({ + 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nj5M', + 'name': 'Boeuf bourguignon : la vraie recette (2)', + 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', + 'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a', + 'recipe_yield': '4 servings', + 'slug': 'boeuf-bourguignon-la-vraie-recette-2', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 222, + 'recipe': dict({ + 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'En9o', + 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', + 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', + 'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34', + 'recipe_yield': '6 servings', + 'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 221, + 'recipe': dict({ + 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'Kn62', + 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', + 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', + 'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d', + 'recipe_yield': '4 servings', + 'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 220, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 219, + 'recipe': dict({ + 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'ibL6', + 'name': 'Pampered Chef Double Chocolate Mocha Trifle', + 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', + 'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f', + 'recipe_yield': '12 servings', + 'slug': 'pampered-chef-double-chocolate-mocha-trifle', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 217, + 'recipe': dict({ + 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'beGq', + 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', + 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', + 'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22', + 'recipe_yield': '24 servings', + 'slug': 'cheeseburger-sliders-easy-30-min-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 216, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 212, + 'recipe': dict({ + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '356X', + 'name': 'All-American Beef Stew Recipe', + 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', + 'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e', + 'recipe_yield': '6 servings', + 'slug': 'all-american-beef-stew-recipe', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 211, + 'recipe': dict({ + 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'nOPT', + 'name': 'Einfacher Nudelauflauf mit Brokkoli', + 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', + 'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159', + 'recipe_yield': '4 servings', + 'slug': 'einfacher-nudelauflauf-mit-brokkoli', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 23), + 'mealplan_id': 196, + 'recipe': dict({ + 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': '5G1v', + 'name': 'Miso Udon Noodles with Spinach and Tofu', + 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', + 'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317', + 'recipe_yield': '2 servings', + 'slug': 'miso-udon-noodles-with-spinach-and-tofu', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': FakeDate(2024, 1, 22), + 'mealplan_id': 195, + 'recipe': dict({ + 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'rrNL', + 'name': 'Mousse de saumon', + 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', + 'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb', + 'recipe_yield': '12 servings', + 'slug': 'mousse-de-saumon', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + dict({ + 'description': 'Dineren met de boys', + 'entry_type': , + 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'mealplan_date': FakeDate(2024, 1, 21), + 'mealplan_id': 1, + 'recipe': None, + 'title': 'Aquavite', + 'user_id': '6caa6e4d-521f-4ef4-9ed7-388bdd63f47d', + }), + ]), + }) +# --- +# name: test_service_recipe + dict({ + 'recipe': dict({ + 'date_added': datetime.date(2024, 6, 29), + 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. 
Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', + 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'image': 'SuPW', + 'ingredients': list([ + dict({ + 'is_food': True, + 'note': '130g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'a3adfe78-d157-44d8-98be-9c133e45bb4e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '1 Vanilla Pod', + 'quantity': 1.0, + 'reference_id': '41d234d7-c040-48f9-91e6-f4636aebb77b', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g softened butter', + 'quantity': 1.0, + 'reference_id': 'f6ce06bf-8b02-43e6-8316-0dc3fb0da0fc', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Icing sugar', + 'quantity': 1.0, + 'reference_id': 'f7fcd86e-b04b-4e07-b69c-513925811491', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '6 Eggs', + 'quantity': 1.0, + 'reference_id': 'a831fbc3-e2f5-452e-a745-450be8b4a130', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '100g Castor sugar', + 'quantity': 1.0, + 'reference_id': 'b5ee4bdc-0047-4de7-968b-f3360bbcb31e', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '140g Plain wheat flour', + 'quantity': 1.0, + 'reference_id': 'a67db09d-429c-4e77-919d-cfed3da675ad', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g apricot jam', + 'quantity': 1.0, + 'reference_id': '55479752-c062-4b25-aae3-2b210999d7b9', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '200g castor sugar', + 'quantity': 1.0, + 'reference_id': 'ff9cd404-24ec-4d38-b0aa-0120ce1df679', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': '150g dark couverture chocolate (min. 55% cocoa content)', + 'quantity': 1.0, + 'reference_id': 'c7fca92e-971e-4728-a227-8b04783583ed', + 'unit': None, + }), + dict({ + 'is_food': True, + 'note': 'Unsweetend whipped cream to garnish', + 'quantity': 1.0, + 'reference_id': 'ef023f23-7816-4871-87f6-4d29f9a283f7', + 'unit': None, + }), + ]), + 'instructions': list([ + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2d558dbf-5361-4ef2-9d86-4161f5eb6146', + 'text': 'Preheat oven to 170°C. Line the base of a springform with baking paper, grease the sides, and dust with a little flour. Melt couverture over boiling water. Let cool slightly.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': 'dbcc1c37-3cbf-4045-9902-8f7fd1e68f0a', + 'text': 'Slit vanilla pod lengthwise and scrape out seeds. Using a hand mixer with whisks, beat the softened butter with the icing sugar and vanilla seeds until bubbles appear.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '2265bd14-a691-40b1-9fe6-7b5dfeac8401', + 'text': 'Separate the eggs. Whisk the egg yolks into the butter mixture one by one. Now gradually add melted couverture chocolate. Beat the egg whites with the castor sugar until stiff, then place on top of the butter and chocolate mixture. Sift the flour over the mixture, then fold in the flour and beaten egg whites.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '0aade447-dfac-4aae-8e67-ac250ad13ae2', + 'text': "Transfer the mixture to the springform, smooth the top, and bake in the oven (middle rack) for 10–15 minutes, leaving the oven door a finger's width ajar. Then close the oven and bake for approximately 50 minutes. 
(The cake is done when it yields slightly to the touch.)", + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '5fdcb703-7103-468d-a65d-a92460b92eb3', + 'text': 'Remove the cake from the oven and loosen the sides of the springform. Carefully tip the cake onto a cake rack lined with baking paper and let cool for approximately 20 minutes. Then pull off the baking paper, turn the cake over, and leave on rack to cool completely.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '81474afc-b44e-49b3-bb67-5d7dab8f832a', + 'text': 'Cut the cake in half horizontally. Warm the jam and stir until smooth. Brush the top of both cake halves with the jam and place one on top of the other. Brush the sides with the jam as well.', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '8fac8aee-0d3c-4f78-9ff8-56d20472e5f1', + 'text': 'To make the glaze, put the castor sugar into a saucepan with 125 ml water and boil over high heat for approximately 5 minutes. Take the sugar syrup off the stove and leave to cool a little. Coarsely chop the couverture, gradually adding it to the syrup, and stir until it forms a thick liquid (see tip below).', + 'title': None, + }), + dict({ + 'ingredient_references': list([ + ]), + 'instruction_id': '7162e099-d651-4656-902a-a09a9b40c4e1', + 'text': 'Pour all the lukewarm glaze liquid at once over the top of the cake and quickly spread using a palette knife. Leave the glaze to set for a few hours. Serve garnished with whipped cream.', + 'title': None, + }), + ]), + 'name': 'Original Sacher-Torte (2)', + 'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/', + 'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93', + 'recipe_yield': '4 servings', + 'slug': 'original-sacher-torte-2', + 'tags': list([ + dict({ + 'name': 'Sacher', + 'slug': 'sacher', + 'tag_id': '1b5789b9-3af6-412e-8c77-8a01caa0aac9', + }), + dict({ + 'name': 'Cake', + 'slug': 'cake', + 'tag_id': '1cf17f96-58b5-4bd3-b1e8-1606a64b413d', + }), + dict({ + 'name': 'Torte', + 'slug': 'torte', + 'tag_id': '3f5f0a3d-728f-440d-a6c7-5a68612e8c67', + }), + dict({ + 'name': 'Sachertorte', + 'slug': 'sachertorte', + 'tag_id': '525f388d-6ee0-4ebe-91fc-dd320a7583f0', + }), + dict({ + 'name': 'Sacher Torte Cake', + 'slug': 'sacher-torte-cake', + 'tag_id': '544a6e08-a899-4f63-9c72-bb2924df70cb', + }), + dict({ + 'name': 'Sacher Torte', + 'slug': 'sacher-torte', + 'tag_id': '576c0a82-84ee-4e50-a14e-aa7a675b6352', + }), + dict({ + 'name': 'Original Sachertorte', + 'slug': 'original-sachertorte', + 'tag_id': 'd530b8e4-275a-4093-804b-6d0de154c206', + }), + ]), + 'user_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0', + }), + }) +# --- +# name: test_service_set_mealplan[payload0-kwargs0] + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- +# name: test_service_set_mealplan[payload1-kwargs1] + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- +# name: test_service_set_random_mealplan + dict({ + 'mealplan': dict({ + 'description': None, + 'entry_type': , + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'mealplan_date': datetime.date(2024, 1, 22), + 'mealplan_id': 230, + 'recipe': dict({ + 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. 
Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", + 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'image': 'AiIo', + 'name': 'Zoete aardappel curry traybake', + 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', + 'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93', + 'recipe_yield': '2 servings', + 'slug': 'zoete-aardappel-curry-traybake', + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + 'title': None, + 'user_id': '1ce8b5fe-04e8-4b80-aab1-d92c94685c6d', + }), + }) +# --- diff --git a/tests/components/mealie/snapshots/test_todo.ambr b/tests/components/mealie/snapshots/test_todo.ambr new file mode 100644 index 00000000000..a580862535e --- /dev/null +++ b/tests/components/mealie/snapshots/test_todo.ambr @@ -0,0 +1,156 @@ +# serializer version: 1 +# name: test_entities[todo.mealie_freezer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_freezer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Freezer', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_e9d78ff2-4b23-4b77-a3a8-464827100b46', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_freezer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Freezer', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_freezer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_entities[todo.mealie_special_groceries-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_special_groceries', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Special groceries', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_f8438635-8211-4be8-80d0-0aa42e37a5f2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_special_groceries-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Special groceries', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_special_groceries', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '3', + }) +# --- +# name: test_entities[todo.mealie_supermarket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.mealie_supermarket', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Supermarket', + 'platform': 'mealie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': 'bf1c62fe-4941-4332-9886-e54e88dbdba0_27edbaab-2ec6-441f-8490-0283ea77585f', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[todo.mealie_supermarket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Supermarket', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_supermarket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- +# name: test_get_todo_list_items + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mealie Supermarket', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.mealie_supermarket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- diff --git a/tests/components/mealie/test_calendar.py b/tests/components/mealie/test_calendar.py index 9df2c1810fd..d11fe5d2354 100644 --- a/tests/components/mealie/test_calendar.py +++ b/tests/components/mealie/test_calendar.py @@ -2,10 +2,11 @@ from datetime import date from http import HTTPStatus -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -40,7 +41,8 @@ async def test_entities( mock_config_entry: MockConfigEntry, ) -> None: """Test the API returns the calendar.""" - await setup_integration(hass, mock_config_entry) + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.CALENDAR]): + await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index 777bb1e4ad1..f2886578744 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -2,15 +2,17 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_API_TOKEN, CONF_HOST +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from . 
import setup_integration + from tests.common import MockConfigEntry @@ -36,6 +38,7 @@ async def test_full_flow( assert result["data"] == { CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token", + CONF_VERIFY_SSL: True, } assert result["result"].unique_id == "bf1c62fe-4941-4332-9886-e54e88dbdba0" @@ -79,10 +82,42 @@ async def test_flow_errors( result["flow_id"], {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ], +) +async def test_flow_version_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + version, +) -> None: + """Test flow version error.""" + mock_mealie_client.get_about.return_value = About(version=version) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "demo.mealie.io", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "mealie_version"} + + async def test_duplicate( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -106,3 +141,213 @@ async def test_duplicate( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + + +async def test_reauth_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reauth_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) 
-> None: + """Test reauth flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "http://test:9090", + CONF_API_TOKEN: "token2", + CONF_VERIFY_SSL: False, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + assert mock_config_entry.data[CONF_API_TOKEN] == "token2" + assert mock_config_entry.data[CONF_HOST] == "http://test:9090" + assert mock_config_entry.data[CONF_VERIFY_SSL] is False + + +async def test_reconfigure_flow_wrong_account( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow with wrong account.""" + await setup_integration(hass, mock_config_entry) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + mock_mealie_client.get_user_info.return_value.user_id = "wrong_user_id" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token2"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "wrong_account" + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (MealieConnectionError, "cannot_connect"), + (MealieAuthenticationError, "invalid_auth"), + (Exception, "unknown"), + ], +) +async def test_reconfigure_flow_exceptions( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_setup_entry: AsyncMock, + mock_config_entry: MockConfigEntry, + exception: Exception, + error: str, +) -> None: + """Test reconfigure flow errors.""" + await setup_integration(hass, mock_config_entry) + mock_mealie_client.get_user_info.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + 
data=mock_config_entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + assert result["errors"] == {"base": error} + + mock_mealie_client.get_user_info.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "http://test:9090", CONF_API_TOKEN: "token"}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" diff --git a/tests/components/mealie/test_diagnostics.py b/tests/components/mealie/test_diagnostics.py new file mode 100644 index 00000000000..88680da9784 --- /dev/null +++ b/tests/components/mealie/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mealie diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/mealie/test_init.py b/tests/components/mealie/test_init.py index 5a7a5387897..a45a67801df 100644 --- a/tests/components/mealie/test_init.py +++ b/tests/components/mealie/test_init.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiomealie import MealieAuthenticationError, MealieConnectionError +from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from syrupy import SnapshotAssertion @@ -32,6 +32,78 @@ async def test_device_info( assert device_entry == snapshot +@pytest.mark.parametrize( + "field", + [ + "get_about", + "get_mealplans", + "get_shopping_lists", + "get_statistics", + ], +) +@pytest.mark.parametrize( + ("exc", "state"), + [ + (MealieConnectionError, ConfigEntryState.SETUP_RETRY), + (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_setup_failure( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + field: str, + exc: Exception, + state: ConfigEntryState, +) -> None: + """Test setup failure.""" + getattr(mock_mealie_client, field).side_effect = exc + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state + + +@pytest.mark.parametrize( + ("version"), + [ + ("v1.0.0beta-5"), + ("v1.0.0-RC2"), + ("v0.1.0"), + ], +) +async def test_setup_too_old( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + version, +) -> None: + """Test setup of Mealie entry with too old version of Mealie.""" + mock_mealie_client.get_about.return_value = About(version=version) + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_setup_invalid( + hass: HomeAssistant, + 
mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup of Mealie entry with an invalid version of Mealie.""" + mock_mealie_client.get_about.return_value = About(version="nightly") + + await setup_integration(hass, mock_config_entry) + + assert ( + "It seems like you are using the nightly version of Mealie, nightly" + " versions could have changes that stop this integration working" in caplog.text + ) + assert mock_config_entry.state is ConfigEntryState.LOADED + + async def test_load_unload_entry( hass: HomeAssistant, mock_mealie_client: AsyncMock, @@ -55,7 +127,7 @@ async def test_load_unload_entry( (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), ], ) -async def test_initialization_failure( +async def test_mealplan_initialization_failure( hass: HomeAssistant, mock_mealie_client: AsyncMock, mock_config_entry: MockConfigEntry, @@ -68,3 +140,25 @@ async def test_initialization_failure( await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is state + + +@pytest.mark.parametrize( + ("exc", "state"), + [ + (MealieConnectionError, ConfigEntryState.SETUP_RETRY), + (MealieAuthenticationError, ConfigEntryState.SETUP_ERROR), + ], +) +async def test_shoppingitems_initialization_failure( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + exc: Exception, + state: ConfigEntryState, +) -> None: + """Test initialization failure.""" + mock_mealie_client.get_shopping_items.side_effect = exc + + await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is state diff --git a/tests/components/mealie/test_sensor.py b/tests/components/mealie/test_sensor.py new file mode 100644 index 00000000000..5a55b89ad21 --- --- /dev/null +++ b/tests/components/mealie/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Mealie sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/mealie/test_services.py b/tests/components/mealie/test_services.py new file mode 100644 index 00000000000..1c8c6f19de7 --- /dev/null +++ b/tests/components/mealie/test_services.py @@ -0,0 +1,446 @@ +"""Tests for the Mealie services.""" + +from datetime import date +from unittest.mock import AsyncMock + +from aiomealie import ( + MealieConnectionError, + MealieNotFoundError, + MealieValidationError, + MealplanEntryType, +) +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.mealie.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_END_DATE, + ATTR_ENTRY_TYPE, + ATTR_INCLUDE_TAGS, + ATTR_NOTE_TEXT, + ATTR_NOTE_TITLE, + ATTR_RECIPE_ID, + ATTR_START_DATE, + ATTR_URL, + DOMAIN, +) +from homeassistant.components.mealie.services import ( + SERVICE_GET_MEALPLAN, + SERVICE_GET_RECIPE, + SERVICE_IMPORT_RECIPE, + SERVICE_SET_MEALPLAN, + SERVICE_SET_RANDOM_MEALPLAN, +) +from homeassistant.const import ATTR_DATE +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_service_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the get_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + freezer.move_to("2023-10-21") + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id}, + blocking=True, + return_response=True, + ) + assert mock_mealie_client.get_mealplans.call_args_list[1][0] == ( + date(2023, 10, 21), + date(2023, 10, 21), + ) + assert response == snapshot + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-25", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[2][0] == ( + date(2023, 10, 22), + date(2023, 10, 25), + ) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_START_DATE: "2023-10-19", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[3][0] == ( + date(2023, 10, 19), + date(2023, 10, 21), + ) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_END_DATE: "2023-10-22", + }, + blocking=True, + return_response=True, + ) + assert response + assert mock_mealie_client.get_mealplans.call_args_list[4][0] == ( + date(2023, 10, 21), + date(2023, 10, 22), + ) + + with 
pytest.raises(ServiceValidationError): + await hass.services.async_call( + DOMAIN, + SERVICE_GET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_START_DATE: "2023-10-22", + ATTR_END_DATE: "2023-10-19", + }, + blocking=True, + return_response=True, + ) + + +async def test_service_recipe( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the get_recipe service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_GET_RECIPE, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, ATTR_RECIPE_ID: "recipe_id"}, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +async def test_service_import_recipe( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the import_recipe service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_IMPORT_RECIPE, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_URL: "http://example.com", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.import_recipe.assert_called_with( + "http://example.com", include_tags=False + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_IMPORT_RECIPE, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_URL: "http://example.com", + ATTR_INCLUDE_TAGS: True, + }, + blocking=True, + return_response=False, + ) + mock_mealie_client.import_recipe.assert_called_with( + "http://example.com", include_tags=True + ) + + +async def test_service_set_random_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the set_random_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + SERVICE_SET_RANDOM_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.random_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH + ) + + mock_mealie_client.random_mealplan.reset_mock() + await hass.services.async_call( + DOMAIN, + SERVICE_SET_RANDOM_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + }, + blocking=True, + return_response=False, + ) + mock_mealie_client.random_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH + ) + + +@pytest.mark.parametrize( + ("payload", "kwargs"), + [ + ( + { + ATTR_RECIPE_ID: "recipe_id", + }, + {"recipe_id": "recipe_id", "note_title": None, "note_text": None}, + ), + ( + { + ATTR_NOTE_TITLE: "Note Title", + ATTR_NOTE_TEXT: "Note Text", + }, + {"recipe_id": None, "note_title": "Note Title", "note_text": "Note Text"}, + ), + ], +) +async def test_service_set_mealplan( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + payload: dict[str, str], + kwargs: dict[str, str], +) -> None: + """Test the set_mealplan service.""" + + await setup_integration(hass, mock_config_entry) + + response = await hass.services.async_call( + DOMAIN, + 
SERVICE_SET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + } + | payload, + blocking=True, + return_response=True, + ) + assert response == snapshot + mock_mealie_client.set_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs + ) + + mock_mealie_client.random_mealplan.reset_mock() + await hass.services.async_call( + DOMAIN, + SERVICE_SET_MEALPLAN, + { + ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id, + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + } + | payload, + blocking=True, + return_response=False, + ) + mock_mealie_client.set_mealplan.assert_called_with( + date(2023, 10, 21), MealplanEntryType.LUNCH, **kwargs + ) + + +@pytest.mark.parametrize( + ("service", "payload", "function", "exception", "raised_exception", "message"), + [ + ( + SERVICE_GET_MEALPLAN, + {}, + "get_mealplans", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_GET_RECIPE, + {ATTR_RECIPE_ID: "recipe_id"}, + "get_recipe", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_GET_RECIPE, + {ATTR_RECIPE_ID: "recipe_id"}, + "get_recipe", + MealieNotFoundError, + ServiceValidationError, + "Recipe with ID or slug `recipe_id` not found", + ), + ( + SERVICE_IMPORT_RECIPE, + {ATTR_URL: "http://example.com"}, + "import_recipe", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_IMPORT_RECIPE, + {ATTR_URL: "http://example.com"}, + "import_recipe", + MealieValidationError, + ServiceValidationError, + "Mealie could not import the recipe from the URL", + ), + ( + SERVICE_SET_RANDOM_MEALPLAN, + {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, + "random_mealplan", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ( + SERVICE_SET_MEALPLAN, + { + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + ATTR_RECIPE_ID: "recipe_id", + }, + "set_mealplan", + MealieConnectionError, + HomeAssistantError, + "Error connecting to Mealie instance", + ), + ], +) +async def test_services_connection_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service: str, + payload: dict[str, str], + function: str, + exception: Exception, + raised_exception: type[Exception], + message: str, +) -> None: + """Test a connection error in the services.""" + + await setup_integration(hass, mock_config_entry) + + getattr(mock_mealie_client, function).side_effect = exception + + with pytest.raises(raised_exception, match=message): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry.entry_id} | payload, + blocking=True, + return_response=True, + ) + + +@pytest.mark.parametrize( + ("service", "payload"), + [ + (SERVICE_GET_MEALPLAN, {}), + (SERVICE_GET_RECIPE, {ATTR_RECIPE_ID: "recipe_id"}), + (SERVICE_IMPORT_RECIPE, {ATTR_URL: "http://example.com"}), + ( + SERVICE_SET_RANDOM_MEALPLAN, + {ATTR_DATE: "2023-10-21", ATTR_ENTRY_TYPE: "lunch"}, + ), + ( + SERVICE_SET_MEALPLAN, + { + ATTR_DATE: "2023-10-21", + ATTR_ENTRY_TYPE: "lunch", + ATTR_RECIPE_ID: "recipe_id", + }, + ), + ], +) +async def test_service_entry_availability( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + service: str, + payload: dict[str, str], +) -> None: + """Test the services without valid entry.""" + 
mock_config_entry.add_to_hass(hass) + mock_config_entry2 = MockConfigEntry(domain=DOMAIN) + mock_config_entry2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises(ServiceValidationError, match="Mock Title is not loaded"): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: mock_config_entry2.entry_id} | payload, + blocking=True, + return_response=True, + ) + + with pytest.raises( + ServiceValidationError, match='Integration "mealie" not found in registry' + ): + await hass.services.async_call( + DOMAIN, + service, + {ATTR_CONFIG_ENTRY_ID: "bad-config_id"} | payload, + blocking=True, + return_response=True, + ) diff --git a/tests/components/mealie/test_todo.py b/tests/components/mealie/test_todo.py new file mode 100644 index 00000000000..920cfc47397 --- /dev/null +++ b/tests/components/mealie/test_todo.py @@ -0,0 +1,197 @@ +"""Tests for the Mealie todo.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from aiomealie import MealieError, ShoppingListsResponse +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.mealie import DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) + + +async def test_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test todo entities.""" + with patch("homeassistant.components.mealie.PLATFORMS", [Platform.TODO]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_add_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for adding a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.add_shopping_item.assert_called_once() + + +async def test_add_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for failing to add a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.add_shopping_item.side_effect = MealieError + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_update_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for updating a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + 
TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.update_shopping_item.assert_called_once() + + +async def test_update_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for failing to update a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.update_shopping_item.side_effect = MealieError + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "aubergine", ATTR_RENAME: "Eggplant", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_delete_todo_list_item( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for deleting a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + mock_mealie_client.delete_shopping_item.assert_called_once() + + +async def test_delete_todo_list_item_error( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test for failing to delete a To-do Item.""" + await setup_integration(hass, mock_config_entry) + + mock_mealie_client.delete_shopping_item = AsyncMock() + mock_mealie_client.delete_shopping_item.side_effect = MealieError + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: "aubergine"}, + target={ATTR_ENTITY_ID: "todo.mealie_supermarket"}, + blocking=True, + ) + + +async def test_runtime_management( + hass: HomeAssistant, + mock_mealie_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test for creating and deleting shopping lists.""" + response = ShoppingListsResponse.from_json( + load_fixture("get_shopping_lists.json", DOMAIN) + ).items + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + await setup_integration(hass, mock_config_entry) + assert hass.states.get("todo.mealie_supermarket") is not None + assert hass.states.get("todo.mealie_special_groceries") is None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=response[0:2] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is not None + + mock_mealie_client.get_shopping_lists.return_value = ShoppingListsResponse( + items=[response[0]] + ) + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert hass.states.get("todo.mealie_special_groceries") is None diff --git a/tests/components/media_extractor/conftest.py b/tests/components/media_extractor/conftest.py index 1d198681f3f..58d51f1cb2e 100644 --- a/tests/components/media_extractor/conftest.py +++ b/tests/components/media_extractor/conftest.py @@ -1,20 +1,18 @@ """Common fixtures for the Media Extractor tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import 
pytest -from typing_extensions import Generator from homeassistant.components.media_extractor import DOMAIN -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import MockYoutubeDL from .const import AUDIO_QUERY -from tests.common import async_mock_service - @pytest.fixture(autouse=True) async def setup_homeassistant(hass: HomeAssistant): @@ -31,12 +29,6 @@ async def setup_media_player(hass: HomeAssistant) -> None: await hass.async_block_till_done() -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "media_player", "play_media") - - @pytest.fixture(name="mock_youtube_dl") async def setup_mock_yt_dlp(hass: HomeAssistant) -> MockYoutubeDL: """Mock YoutubeDL.""" diff --git a/tests/components/media_extractor/test_init.py b/tests/components/media_extractor/test_init.py index 8c8a1407ccc..bc80e063697 100644 --- a/tests/components/media_extractor/test_init.py +++ b/tests/components/media_extractor/test_init.py @@ -100,7 +100,7 @@ async def test_extracting_playlist_no_entries( async def test_play_media_service( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, request: pytest.FixtureRequest, config_fixture: str, @@ -123,13 +123,14 @@ async def test_play_media_service( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_download_error( hass: HomeAssistant, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test handling DownloadError.""" @@ -152,7 +153,7 @@ async def test_download_error( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 assert f"Could not retrieve data for the URL: {YOUTUBE_VIDEO}" in caplog.text @@ -160,7 +161,7 @@ async def test_no_target_entity( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test having no target entity.""" @@ -179,14 +180,15 @@ async def test_no_target_entity( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_playlist( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], snapshot: SnapshotAssertion, ) -> None: """Test extracting a playlist.""" @@ -205,14 +207,15 @@ async def test_playlist( ) await hass.async_block_till_done() - assert calls[0].data == snapshot + assert len(service_calls) == 2 + assert service_calls[1].data == snapshot async def test_playlist_no_entries( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test extracting a playlist without entries.""" @@ -231,7 +234,7 @@ async def test_playlist_no_entries( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 assert ( f"Could not retrieve data for the URL: 
{YOUTUBE_EMPTY_PLAYLIST}" in caplog.text ) @@ -240,7 +243,7 @@ async def test_playlist_no_entries( async def test_query_error( hass: HomeAssistant, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test handling error with query.""" @@ -270,15 +273,13 @@ async def test_query_error( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_cookiefile_detection( hass: HomeAssistant, mock_youtube_dl: MockYoutubeDL, empty_media_extractor_config: dict[str, Any], - calls: list[ServiceCall], - snapshot: SnapshotAssertion, caplog: pytest.LogCaptureFixture, ) -> None: """Test cookie file detection.""" @@ -289,16 +290,19 @@ async def test_cookiefile_detection( cookies_dir = os.path.join(hass.config.config_dir, "media_extractor") cookies_file = os.path.join(cookies_dir, "cookies.txt") - if not os.path.exists(cookies_dir): - os.makedirs(cookies_dir) + def _write_cookies_file() -> None: + if not os.path.exists(cookies_dir): + os.makedirs(cookies_dir) - with open(cookies_file, "w+", encoding="utf-8") as f: - f.write( - """# Netscape HTTP Cookie File + with open(cookies_file, "w+", encoding="utf-8") as f: + f.write( + """# Netscape HTTP Cookie File - .youtube.com TRUE / TRUE 1701708706 GPS 1 - """ - ) + .youtube.com TRUE / TRUE 1701708706 GPS 1 + """ + ) + + await hass.async_add_executor_job(_write_cookies_file) await hass.services.async_call( DOMAIN, @@ -313,7 +317,7 @@ async def test_cookiefile_detection( assert "Media extractor loaded cookies file" in caplog.text - os.remove(cookies_file) + await hass.async_add_executor_job(os.remove, cookies_file) await hass.services.async_call( DOMAIN, diff --git a/tests/components/media_player/test_device_condition.py b/tests/components/media_player/test_device_condition.py index 186cd674b39..78d30e2ca6e 100644 --- a/tests/components/media_player/test_device_condition.py +++ b/tests/components/media_player/test_device_condition.py @@ -20,11 +20,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -32,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -136,7 +126,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -274,8 +264,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" 
hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") @@ -285,8 +275,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off - event - test_event2" hass.states.async_set(entry.entity_id, STATE_IDLE) hass.bus.async_fire("test_event1") @@ -296,8 +286,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_idle - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_idle - event - test_event3" hass.states.async_set(entry.entity_id, STATE_PAUSED) hass.bus.async_fire("test_event1") @@ -307,8 +297,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "is_paused - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "is_paused - event - test_event4" hass.states.async_set(entry.entity_id, STATE_PLAYING) hass.bus.async_fire("test_event1") @@ -318,8 +308,8 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 5 - assert calls[4].data["some"] == "is_playing - event - test_event5" + assert len(service_calls) == 5 + assert service_calls[4].data["some"] == "is_playing - event - test_event5" hass.states.async_set(entry.entity_id, STATE_BUFFERING) hass.bus.async_fire("test_event1") @@ -329,15 +319,15 @@ async def test_if_state( hass.bus.async_fire("test_event5") hass.bus.async_fire("test_event6") await hass.async_block_till_done() - assert len(calls) == 6 - assert calls[5].data["some"] == "is_buffering - event - test_event6" + assert len(service_calls) == 6 + assert service_calls[5].data["some"] == "is_buffering - event - test_event6" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -380,5 +370,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on - event - test_event1" diff --git a/tests/components/media_player/test_device_trigger.py b/tests/components/media_player/test_device_trigger.py index e9d5fbd646e..4bb27b73f24 100644 --- a/tests/components/media_player/test_device_trigger.py +++ b/tests/components/media_player/test_device_trigger.py @@ -28,7 +28,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return 
async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -209,7 +202,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -265,8 +258,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { "turned_on - device - media_player.test_5678 - off - on - None", "changed_states - device - media_player.test_5678 - off - on - None", } @@ -274,8 +267,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is turning off. hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { "turned_off - device - media_player.test_5678 - on - off - None", "changed_states - device - media_player.test_5678 - on - off - None", } @@ -283,8 +276,8 @@ async def test_if_fires_on_state_change( # Fake that the entity becomes idle. hass.states.async_set(entry.entity_id, STATE_IDLE) await hass.async_block_till_done() - assert len(calls) == 6 - assert {calls[4].data["some"], calls[5].data["some"]} == { + assert len(service_calls) == 6 + assert {service_calls[4].data["some"], service_calls[5].data["some"]} == { "idle - device - media_player.test_5678 - off - idle - None", "changed_states - device - media_player.test_5678 - off - idle - None", } @@ -292,8 +285,8 @@ async def test_if_fires_on_state_change( # Fake that the entity starts playing. hass.states.async_set(entry.entity_id, STATE_PLAYING) await hass.async_block_till_done() - assert len(calls) == 8 - assert {calls[6].data["some"], calls[7].data["some"]} == { + assert len(service_calls) == 8 + assert {service_calls[6].data["some"], service_calls[7].data["some"]} == { "playing - device - media_player.test_5678 - idle - playing - None", "changed_states - device - media_player.test_5678 - idle - playing - None", } @@ -301,8 +294,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is paused. hass.states.async_set(entry.entity_id, STATE_PAUSED) await hass.async_block_till_done() - assert len(calls) == 10 - assert {calls[8].data["some"], calls[9].data["some"]} == { + assert len(service_calls) == 10 + assert {service_calls[8].data["some"], service_calls[9].data["some"]} == { "paused - device - media_player.test_5678 - playing - paused - None", "changed_states - device - media_player.test_5678 - playing - paused - None", } @@ -310,8 +303,8 @@ async def test_if_fires_on_state_change( # Fake that the entity is buffering. 
hass.states.async_set(entry.entity_id, STATE_BUFFERING) await hass.async_block_till_done() - assert len(calls) == 12 - assert {calls[10].data["some"], calls[11].data["some"]} == { + assert len(service_calls) == 12 + assert {service_calls[10].data["some"], service_calls[11].data["some"]} == { "buffering - device - media_player.test_5678 - paused - buffering - None", "changed_states - device - media_player.test_5678 - paused - buffering - None", } @@ -321,7 +314,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -369,9 +362,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is turning on. hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "turned_on - device - media_player.test_5678 - off - on - None" ) @@ -380,7 +373,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -426,16 +419,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - off - on - 0:00:05" ) diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py index 4c7fbd06edc..de90f229a85 100644 --- a/tests/components/media_source/test_local_source.py +++ b/tests/components/media_source/test_local_source.py @@ -1,5 +1,6 @@ """Test Local Media Source.""" +from collections.abc import AsyncGenerator from http import HTTPStatus import io from pathlib import Path @@ -7,7 +8,6 @@ from tempfile import TemporaryDirectory from unittest.mock import patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import media_source, websocket_api from homeassistant.components.media_source import const diff --git a/tests/components/melcloud/test_diagnostics.py b/tests/components/melcloud/test_diagnostics.py index cbb35eadfd4..32ec94a54d1 100644 --- a/tests/components/melcloud/test_diagnostics.py +++ b/tests/components/melcloud/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.melcloud.const import DOMAIN from homeassistant.core import HomeAssistant @@ -36,4 +37,4 @@ async def test_get_config_entry_diagnostics( diagnostics = await get_diagnostics_for_config_entry( hass, hass_client, config_entry ) - assert diagnostics == snapshot + assert diagnostics == snapshot(exclude=props("created_at", "modified_at")) 
diff --git a/tests/components/melnor/conftest.py b/tests/components/melnor/conftest.py index 38bc1a62d51..f30213c4efd 100644 --- a/tests/components/melnor/conftest.py +++ b/tests/components/melnor/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from datetime import UTC, datetime, time, timedelta from unittest.mock import AsyncMock, _patch, patch from melnor_bluetooth.device import Device import pytest -from typing_extensions import Generator from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.components.melnor.const import DOMAIN diff --git a/tests/components/melnor/test_sensor.py b/tests/components/melnor/test_sensor.py index d04494d44ad..a2ba23d9e61 100644 --- a/tests/components/melnor/test_sensor.py +++ b/tests/components/melnor/test_sensor.py @@ -2,6 +2,8 @@ from __future__ import annotations +from datetime import timedelta + from freezegun import freeze_time from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass @@ -51,7 +53,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - end_time = now + dt_util.dt.timedelta(minutes=10) + end_time = now + timedelta(minutes=10) # we control this mock @@ -76,7 +78,7 @@ async def test_minutes_remaining_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._is_watering = True - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 @@ -94,7 +96,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: entry = mock_config_entry(hass) device = mock_melnor_device() - next_cycle = now + dt_util.dt.timedelta(minutes=10) + next_cycle = now + timedelta(minutes=10) # we control this mock device.zone1.frequency._next_run_time = next_cycle @@ -118,7 +120,7 @@ async def test_schedule_next_cycle_sensor(hass: HomeAssistant) -> None: # Turn valve on device.zone1._schedule_enabled = True - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() # Valve is on, report 10 diff --git a/tests/components/melnor/test_time.py b/tests/components/melnor/test_time.py index 1d12c3b47f8..50b51d31ff8 100644 --- a/tests/components/melnor/test_time.py +++ b/tests/components/melnor/test_time.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import time +from datetime import time, timedelta from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -46,7 +46,7 @@ async def test_schedule_start_time(hass: HomeAssistant) -> None: blocking=True, ) - async_fire_time_changed(hass, now + dt_util.dt.timedelta(seconds=10)) + async_fire_time_changed(hass, now + timedelta(seconds=10)) await hass.async_block_till_done() time_entity = hass.states.get("time.zone_1_schedule_start_time") diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index 082def901c5..dca760230ac 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -8,18 +8,13 @@ from pycsspeechtts import pycsspeechtts import pytest from homeassistant.components import tts -from homeassistant.components.media_player import ( - ATTR_MEDIA_CONTENT_ID, - DOMAIN as DOMAIN_MP, - SERVICE_PLAY_MEDIA, -) +from 
homeassistant.components.media_player import ATTR_MEDIA_CONTENT_ID from homeassistant.components.microsoft.tts import SUPPORTED_LANGUAGES from homeassistant.config import async_process_ha_core_config from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ServiceNotFound from homeassistant.setup import async_setup_component -from tests.common import async_mock_service from tests.components.tts.common import retrieve_media from tests.typing import ClientSessionGenerator @@ -30,12 +25,6 @@ def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: return mock_tts_cache_dir -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Mock media player calls.""" - return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) - - @pytest.fixture(autouse=True) async def setup_internal_url(hass: HomeAssistant): """Set up internal url.""" @@ -58,7 +47,7 @@ async def test_service_say( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say.""" @@ -77,9 +66,11 @@ async def test_service_say( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -102,7 +93,7 @@ async def test_service_say_en_gb_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the config.""" @@ -130,9 +121,11 @@ async def test_service_say_en_gb_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -154,7 +147,7 @@ async def test_service_say_en_gb_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with en-gb code in the service.""" @@ -177,9 +170,11 @@ async def test_service_say_en_gb_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -201,7 +196,7 @@ async def test_service_say_fa_ir_config( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the config.""" @@ -229,9 +224,11 @@ async def test_service_say_fa_ir_config( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -253,7 +250,7 @@ async def test_service_say_fa_ir_service( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with fa-ir code in the service.""" @@ -280,9 +277,11 @@ async def 
test_service_say_fa_ir_service( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.OK ) @@ -317,9 +316,7 @@ def test_supported_languages() -> None: assert len(SUPPORTED_LANGUAGES) > 100 -async def test_invalid_language( - hass: HomeAssistant, mock_tts, calls: list[ServiceCall] -) -> None: +async def test_invalid_language(hass: HomeAssistant, mock_tts) -> None: """Test setup component with invalid language.""" await async_setup_component( hass, @@ -339,7 +336,6 @@ async def test_invalid_language( blocking=True, ) - assert len(calls) == 0 assert len(mock_tts.mock_calls) == 0 @@ -347,7 +343,7 @@ async def test_service_say_error( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test service call say with http error.""" mock_tts.return_value.speak.side_effect = pycsspeechtts.requests.HTTPError @@ -366,9 +362,11 @@ async def test_service_say_error( blocking=True, ) - assert len(calls) == 1 + assert len(service_calls) == 2 assert ( - await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + await retrieve_media( + hass, hass_client, service_calls[1].data[ATTR_MEDIA_CONTENT_ID] + ) == HTTPStatus.NOT_FOUND ) diff --git a/tests/components/mjpeg/conftest.py b/tests/components/mjpeg/conftest.py index 00eaf946113..12e0b4c0faf 100644 --- a/tests/components/mjpeg/conftest.py +++ b/tests/components/mjpeg/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from requests_mock import Mocker -from typing_extensions import Generator from homeassistant.components.mjpeg.const import ( CONF_MJPEG_URL, diff --git a/tests/components/mobile_app/conftest.py b/tests/components/mobile_app/conftest.py index 657b80a759a..9f0681d41f7 100644 --- a/tests/components/mobile_app/conftest.py +++ b/tests/components/mobile_app/conftest.py @@ -67,7 +67,7 @@ async def webhook_client( @pytest.fixture(autouse=True) -async def setup_ws(hass): +async def setup_ws(hass: HomeAssistant) -> None: """Configure the websocket_api component.""" assert await async_setup_component(hass, "repairs", {}) assert await async_setup_component(hass, "websocket_api", {}) diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index 15380a0d8d7..e1c7ed27cf9 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -89,6 +89,7 @@ async def _test_create_cloud_hook( "homeassistant.components.cloud.async_active_subscription", return_value=async_active_subscription_return_value, ), + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), patch("homeassistant.components.cloud.async_is_connected", return_value=True), patch( "homeassistant.components.cloud.async_get_or_create_cloudhook", @@ -187,3 +188,41 @@ async def test_create_cloud_hook_after_connection( ) await _test_create_cloud_hook(hass, hass_admin_user, {}, False, additional_steps) + + +@pytest.mark.parametrize( + ("cloud_logged_in", "should_cloudhook_exist"), + [(True, True), (False, False)], +) +async def test_delete_cloud_hook( + hass: HomeAssistant, + hass_admin_user: MockUser, + cloud_logged_in: bool, + should_cloudhook_exist: bool, +) -> None: + 
"""Test deleting the cloud hook only when logged out of the cloud.""" + + config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.cloud.async_is_logged_in", + return_value=cloud_logged_in, + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist diff --git a/tests/components/mobile_app/test_timers.py b/tests/components/mobile_app/test_timers.py new file mode 100644 index 00000000000..9f7d4cebc58 --- /dev/null +++ b/tests/components/mobile_app/test_timers.py @@ -0,0 +1,70 @@ +"""Test mobile app timers.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.mobile_app import DATA_DEVICES, DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import intent as intent_helper + + +@pytest.mark.parametrize( + ("intent_args", "message"), + [ + ( + {}, + "0:02:00 timer finished", + ), + ( + {"name": {"value": "pizza"}}, + "pizza finished", + ), + ], +) +async def test_timer_events( + hass: HomeAssistant, push_registration, intent_args: dict, message: str +) -> None: + """Test for timer events.""" + webhook_id = push_registration["webhook_id"] + device_id = hass.data[DOMAIN][DATA_DEVICES][webhook_id].id + + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "minutes": {"value": 2}, + } + | intent_args, + device_id=device_id, + ) + + with patch( + "homeassistant.components.mobile_app.notify.MobileAppNotificationService.async_send_message" + ) as mock_send_message: + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_DECREASE_TIMER, + { + "minutes": {"value": 2}, + }, + device_id=device_id, + ) + await hass.async_block_till_done() + + assert mock_send_message.mock_calls[0][2] == { + "target": [webhook_id], + "message": message, + "data": { + "channel": "Timers", + "group": "timers", + "importance": "high", + "ttl": 0, + "priority": "high", + "push": { + "interruption-level": "time-sensitive", + }, + }, + } diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 067fb2d123d..6741504585a 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -61,7 +61,7 @@ def register_words_fixture(): @pytest.fixture(name="config_addon") -def config_addon_fixture(): +def config_addon_fixture() -> dict[str, Any] | None: """Add extra configuration items.""" return None diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index a52285b22d7..5578234ee6e 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -766,7 +766,7 @@ async def test_service_climate_swing_update( ("temperature", "result", "do_config"), [ ( - 35, + 31, [0x00], { CONF_CLIMATES: [ @@ -781,7 +781,7 @@ async def test_service_climate_swing_update( }, ), ( - 36, + 32, [0x00, 0x00], { CONF_CLIMATES: [ @@ -796,7 +796,7 @@ async def test_service_climate_swing_update( }, ), ( - 37.5, + 33.5, [0x00, 0x00], { CONF_CLIMATES: [ @@ -811,7 +811,7 @@ async def 
test_service_climate_swing_update( }, ), ( - "39", + "34", [0x00, 0x00, 0x00, 0x00], { CONF_CLIMATES: [ diff --git a/tests/components/modbus/test_init.py b/tests/components/modbus/test_init.py index 920003ad0c9..d4dc5b05fac 100644 --- a/tests/components/modbus/test_init.py +++ b/tests/components/modbus/test_init.py @@ -48,15 +48,7 @@ from homeassistant.components.modbus.const import ( CONF_FAN_MODE_HIGH, CONF_FAN_MODE_OFF, CONF_FAN_MODE_ON, - CONF_FAN_MODE_REGISTER, CONF_FAN_MODE_VALUES, - CONF_HVAC_MODE_COOL, - CONF_HVAC_MODE_DRY, - CONF_HVAC_MODE_HEAT, - CONF_HVAC_MODE_HEAT_COOL, - CONF_HVAC_MODE_REGISTER, - CONF_HVAC_MODE_VALUES, - CONF_HVAC_ONOFF_REGISTER, CONF_INPUT_TYPE, CONF_MSG_WAIT, CONF_PARITY, @@ -67,12 +59,10 @@ from homeassistant.components.modbus.const import ( CONF_SWAP_BYTE, CONF_SWAP_WORD, CONF_SWAP_WORD_BYTE, - CONF_SWING_MODE_REGISTER, CONF_SWING_MODE_SWING_BOTH, CONF_SWING_MODE_SWING_OFF, CONF_SWING_MODE_SWING_ON, CONF_SWING_MODE_VALUES, - CONF_TARGET_TEMP, CONF_VIRTUAL_COUNT, DEFAULT_SCAN_INTERVAL, MODBUS_DOMAIN as DOMAIN, @@ -88,7 +78,6 @@ from homeassistant.components.modbus.const import ( ) from homeassistant.components.modbus.validators import ( check_config, - check_hvac_target_temp_registers, duplicate_fan_mode_validator, duplicate_swing_mode_validator, hvac_fixedsize_reglist_validator, @@ -457,27 +446,6 @@ async def test_check_config(hass: HomeAssistant, do_config) -> None: ], } ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - } - ], ], ) async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: @@ -510,225 +478,6 @@ async def test_check_config_sensor(hass: HomeAssistant, do_config) -> None: ], } ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 118, - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 119, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 118, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 118, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: [99], - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - 
CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: [117], - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: [121], - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], - [ # Testing Swing modes - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_SWING_MODE_VALUES: { - CONF_SWING_MODE_SWING_ON: 0, - CONF_SWING_MODE_SWING_BOTH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - CONF_TARGET_TEMP: 118, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: [120], - CONF_SWING_MODE_VALUES: { - CONF_SWING_MODE_SWING_ON: 0, - CONF_SWING_MODE_SWING_BOTH: 1, - }, - }, - }, - ], - } - ], - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_CLIMATES: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 118, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 122, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - { - CONF_NAME: TEST_ENTITY_NAME + " 2", - CONF_ADDRESS: 118, - CONF_TARGET_TEMP: [130, 131, 132, 133, 134, 135, 136], - CONF_SLAVE: 0, - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 130, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 122, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 120, - CONF_FAN_MODE_VALUES: { - CONF_FAN_MODE_ON: 0, - CONF_FAN_MODE_HIGH: 1, - }, - }, - }, - ], - } - ], ], ) async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: @@ -737,83 +486,6 @@ async def test_check_config_climate(hass: HomeAssistant, do_config) -> None: assert len(do_config[0][CONF_CLIMATES]) == 1 -@pytest.mark.parametrize( - "do_config", - [ - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - CONF_TARGET_TEMP: [117, 121, 119, 150, 151, 152, 156], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 119, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 121, - }, - }, - ], - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - CONF_TARGET_TEMP: [117], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 117, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_FAN_MODE_REGISTER: { - CONF_ADDRESS: 117, - }, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: 117, - }, - }, - ], - [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1, - 
CONF_TARGET_TEMP: [117], - CONF_HVAC_MODE_REGISTER: { - CONF_ADDRESS: 117, - CONF_HVAC_MODE_VALUES: { - CONF_HVAC_MODE_COOL: 0, - CONF_HVAC_MODE_HEAT: 1, - CONF_HVAC_MODE_HEAT_COOL: 2, - CONF_HVAC_MODE_DRY: 3, - }, - }, - CONF_HVAC_ONOFF_REGISTER: 117, - CONF_SWING_MODE_REGISTER: { - CONF_ADDRESS: [117], - }, - }, - ], - ], -) -async def test_climate_conflict_addresses(do_config) -> None: - """Test conflicts among the addresses of target temp and other climate addresses.""" - check_hvac_target_temp_registers(do_config[0]) - assert CONF_HVAC_MODE_REGISTER not in do_config[0] - assert CONF_HVAC_ONOFF_REGISTER not in do_config[0] - assert CONF_FAN_MODE_REGISTER not in do_config[0] - assert CONF_SWING_MODE_REGISTER not in do_config[0] - - @pytest.mark.parametrize( "do_config", [ @@ -852,157 +524,6 @@ async def test_duplicate_swing_mode_validator(do_config) -> None: assert len(do_config[CONF_SWING_MODE_VALUES]) == 2 -@pytest.mark.parametrize( - ("do_config", "sensor_cnt"), - [ - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - ], - 2, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 117, - CONF_SLAVE: 1, - }, - ], - }, - ], - 2, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - ], - }, - ], - 1, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME + "1", - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - { - CONF_NAME: TEST_MODBUS_NAME + "1", - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 119, - CONF_SLAVE: 0, - }, - ], - }, - ], - 2, - ), - ( - [ - { - CONF_NAME: TEST_MODBUS_NAME, - CONF_TYPE: TCP, - CONF_HOST: TEST_MODBUS_HOST, - CONF_PORT: TEST_PORT_TCP, - CONF_TIMEOUT: 3, - CONF_SENSORS: [ - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 117, - CONF_SLAVE: 0, - }, - { - CONF_NAME: TEST_ENTITY_NAME, - CONF_ADDRESS: 1179, - CONF_SLAVE: 0, - }, - ], - }, - ], - 1, - ), - ], -) -async def test_duplicate_addresses(hass: HomeAssistant, do_config, sensor_cnt) -> None: - """Test duplicate entity validator.""" - check_config(hass, do_config) - use_inx = len(do_config) - 1 - assert len(do_config[use_inx][CONF_SENSORS]) == sensor_cnt - - @pytest.mark.parametrize( "do_config", [ diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index 760d82dfedc..2de1d34b403 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ 
b/tests/components/mold_indicator/test_sensor.py @@ -18,7 +18,7 @@ from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -def init_sensors_fixture(hass): +def init_sensors_fixture(hass: HomeAssistant) -> None: """Set up things to be run when tests are started.""" hass.states.async_set( "test.indoortemp", "20", {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS} diff --git a/tests/components/moon/conftest.py b/tests/components/moon/conftest.py index 6fa54fcb603..3cf0eb1afc3 100644 --- a/tests/components/moon/conftest.py +++ b/tests/components/moon/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.moon.const import DOMAIN diff --git a/tests/components/mopeka/test_config_flow.py b/tests/components/mopeka/test_config_flow.py index 826fe8db2aa..7a341052f22 100644 --- a/tests/components/mopeka/test_config_flow.py +++ b/tests/components/mopeka/test_config_flow.py @@ -2,8 +2,10 @@ from unittest.mock import patch +import voluptuous as vol + from homeassistant import config_entries -from homeassistant.components.mopeka.const import DOMAIN +from homeassistant.components.mopeka.const import CONF_MEDIUM_TYPE, DOMAIN, MediumType from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,13 +23,14 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "bluetooth_confirm" + with patch("homeassistant.components.mopeka.async_setup_entry", return_value=True): result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} + result["flow_id"], user_input={CONF_MEDIUM_TYPE: MediumType.PROPANE.value} ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert result2["data"] == {CONF_MEDIUM_TYPE: MediumType.PROPANE.value} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -71,7 +74,10 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -190,8 +196,44 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" # Verify the original one was aborted assert not hass.config_entries.flow.async_progress(DOMAIN) + + +async def test_async_step_reconfigure_options(hass: HomeAssistant) -> None: + """Test reconfig options: change MediumType from air to fresh water.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:75:10", + title="TD40/TD200 7510", + data={CONF_MEDIUM_TYPE: MediumType.AIR.value}, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert 
entry.data[CONF_MEDIUM_TYPE] == MediumType.AIR.value + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + schema: vol.Schema = result["data_schema"] + medium_type_key = next( + iter(key for key in schema.schema if key == CONF_MEDIUM_TYPE) + ) + assert medium_type_key.default() == MediumType.AIR.value + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MEDIUM_TYPE: MediumType.FRESH_WATER.value}, + ) + assert result2["type"] == FlowResultType.CREATE_ENTRY + + # Verify the new configuration + assert entry.data[CONF_MEDIUM_TYPE] == MediumType.FRESH_WATER.value diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index 342e958eae4..00db23734dd 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -1,9 +1,9 @@ """Setup the Motionblinds Bluetooth tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator TEST_MAC = "abcd" TEST_NAME = f"MOTION_{TEST_MAC.upper()}" diff --git a/tests/components/motionmount/conftest.py b/tests/components/motionmount/conftest.py index 9e5b0355387..49f624b5266 100644 --- a/tests/components/motionmount/conftest.py +++ b/tests/components/motionmount/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Vogel's MotionMount integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.motionmount.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT diff --git a/tests/components/mpd/conftest.py b/tests/components/mpd/conftest.py index 818f085decc..a73a529cd0b 100644 --- a/tests/components/mpd/conftest.py +++ b/tests/components/mpd/conftest.py @@ -1,7 +1,7 @@ """Fixtures for Music Player Daemon integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -22,7 +22,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.mpd.async_setup_entry", return_value=True @@ -31,7 +31,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_mpd_client() -> Generator[AsyncMock, None, None]: +def mock_mpd_client() -> Generator[MagicMock]: """Return a mock for Music Player Daemon client.""" with patch( diff --git a/tests/components/mqtt/conftest.py b/tests/components/mqtt/conftest.py index 774785bb42a..7395767aeae 100644 --- a/tests/components/mqtt/conftest.py +++ b/tests/components/mqtt/conftest.py @@ -1,12 +1,12 @@ """Test fixtures for mqtt component.""" import asyncio +from collections.abc import AsyncGenerator, Generator from random import getrandbits from typing import Any from unittest.mock import patch import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components import mqtt from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage diff --git a/tests/components/mqtt/test_alarm_control_panel.py b/tests/components/mqtt/test_alarm_control_panel.py index aba2d5f6da2..07ebb671e37 
100644 --- a/tests/components/mqtt/test_alarm_control_panel.py +++ b/tests/components/mqtt/test_alarm_control_panel.py @@ -853,10 +853,7 @@ async def test_availability_without_topic( ) -> None: """Test availability without defined availability topic.""" await help_test_availability_without_topic( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG_CODE, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE ) @@ -865,10 +862,7 @@ async def test_default_availability_payload( ) -> None: """Test availability by default payload with defined topic.""" await help_test_default_availability_payload( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG_CODE, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG_CODE ) @@ -877,10 +871,7 @@ async def test_custom_availability_payload( ) -> None: """Test availability by custom payload with defined topic.""" await help_test_custom_availability_payload( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -889,10 +880,7 @@ async def test_setting_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -914,10 +902,7 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_attribute_with_template( - hass, - mqtt_mock_entry, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -928,11 +913,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -943,11 +924,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - alarm_control_panel.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, alarm_control_panel.DOMAIN, DEFAULT_CONFIG ) @@ -1259,8 +1236,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = alarm_control_panel.DOMAIN @@ -1283,8 +1259,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = alarm_control_panel.DOMAIN diff --git a/tests/components/mqtt/test_binary_sensor.py b/tests/components/mqtt/test_binary_sensor.py index 6ba479fca74..e2c168bd46e 100644 --- a/tests/components/mqtt/test_binary_sensor.py +++ b/tests/components/mqtt/test_binary_sensor.py @@ -758,10 +758,7 @@ async def test_setting_attribute_with_template( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await 
help_test_setting_attribute_with_template( - hass, - mqtt_mock_entry, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -772,11 +769,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -787,11 +780,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - binary_sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1036,8 +1025,7 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = binary_sensor.DOMAIN diff --git a/tests/components/mqtt/test_button.py b/tests/components/mqtt/test_button.py index 7e5d748e2ab..d85ead6ecee 100644 --- a/tests/components/mqtt/test_button.py +++ b/tests/components/mqtt/test_button.py @@ -216,11 +216,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG ) @@ -231,11 +227,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - button.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, button.DOMAIN, DEFAULT_CONFIG ) @@ -488,8 +480,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = button.DOMAIN @@ -512,8 +503,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = button.DOMAIN diff --git a/tests/components/mqtt/test_camera.py b/tests/components/mqtt/test_camera.py index d02e19e6063..cda536dc19e 100644 --- a/tests/components/mqtt/test_camera.py +++ b/tests/components/mqtt/test_camera.py @@ -389,8 +389,7 @@ async def test_entity_debug_info_message( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = camera.DOMAIN @@ -413,8 +412,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = camera.DOMAIN diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index c41a6366dfe..13bd6b5feda 100644 --- 
a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -179,14 +179,14 @@ async def test_get_hvac_modes( state = hass.states.get(ENTITY_CLIMATE) modes = state.attributes.get("hvac_modes") - assert [ + assert modes == [ HVACMode.AUTO, HVACMode.OFF, HVACMode.COOL, HVACMode.HEAT, HVACMode.DRY, HVACMode.FAN_ONLY, - ] == modes + ] @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -654,11 +654,11 @@ async def test_set_target_temperature( assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) mqtt_mock.async_publish.reset_mock() - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "47.0", 0, False + "temperature-topic", "35.0", 0, False ) # also test directly supplying the operation mode to set_temperature @@ -713,7 +713,7 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None @@ -1017,7 +1017,16 @@ async def test_handle_action_received( # Cycle through valid modes # Redefine actions according to https://developers.home-assistant.io/docs/core/entity/climate/#hvac-action - actions = ["off", "preheating", "heating", "cooling", "drying", "idle", "fan"] + actions = [ + "off", + "preheating", + "defrosting", + "heating", + "cooling", + "drying", + "idle", + "fan", + ] assert all(elem in actions for elem in HVACAction) for action in actions: async_fire_mqtt_message(hass, "action", action) @@ -1581,13 +1590,13 @@ async def test_set_and_templates( assert state.attributes.get("swing_mode") == "on" # Temperature - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "temp: 47.0", 0, False + "temperature-topic", "temp: 35.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 # Temperature Low/High await common.async_set_temperature( @@ -1867,11 +1876,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG ) @@ -1882,11 +1887,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - climate.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, climate.DOMAIN, DEFAULT_CONFIG ) diff --git a/tests/components/mqtt/test_config_flow.py 
b/tests/components/mqtt/test_config_flow.py index 457bd19c16f..2b4cb20ccf9 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1,6 +1,6 @@ """Test config flow.""" -from collections.abc import Iterator +from collections.abc import Generator, Iterator from contextlib import contextmanager from pathlib import Path from ssl import SSLError @@ -9,7 +9,6 @@ from unittest.mock import AsyncMock, MagicMock, patch from uuid import uuid4 import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant import config_entries @@ -851,16 +850,17 @@ async def test_invalid_discovery_prefix( assert mock_reload_after_entry_update.call_count == 0 -def get_default(schema: vol.Schema, key: str) -> Any: +def get_default(schema: vol.Schema, key: str) -> Any | None: """Get default value for key in voluptuous schema.""" for schema_key in schema: if schema_key == key: if schema_key.default == vol.UNDEFINED: return None return schema_key.default() + return None -def get_suggested(schema: vol.Schema, key: str) -> Any: +def get_suggested(schema: vol.Schema, key: str) -> Any | None: """Get suggested value for key in voluptuous schema.""" for schema_key in schema: if schema_key == key: @@ -870,6 +870,7 @@ def get_suggested(schema: vol.Schema, key: str) -> Any: ): return None return schema_key.description["suggested_value"] + return None @pytest.mark.usefixtures("mock_reload_after_entry_update") diff --git a/tests/components/mqtt/test_cover.py b/tests/components/mqtt/test_cover.py index f37de8b6a2e..451665de96a 100644 --- a/tests/components/mqtt/test_cover.py +++ b/tests/components/mqtt/test_cover.py @@ -3461,8 +3461,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = cover.DOMAIN diff --git a/tests/components/mqtt/test_device_tracker.py b/tests/components/mqtt/test_device_tracker.py index 9759dfcadd7..00e88860299 100644 --- a/tests/components/mqtt/test_device_tracker.py +++ b/tests/components/mqtt/test_device_tracker.py @@ -584,11 +584,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - device_tracker.DOMAIN, - DEFAULT_CONFIG, - None, + hass, mqtt_mock_entry, device_tracker.DOMAIN, DEFAULT_CONFIG, None ) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index ce75bd01a03..10322dd9046 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -17,11 +17,7 @@ from homeassistant.setup import async_setup_component from .test_common import help_test_unload_config_entry -from tests.common import ( - async_fire_mqtt_message, - async_get_device_automations, - async_mock_service, -) +from tests.common import async_fire_mqtt_message, async_get_device_automations from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator @@ -30,12 +26,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return 
async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -284,7 +274,7 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing.""" @@ -350,20 +340,20 @@ async def test_if_fires_on_mqtt_message( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_discovery_id_is_prefered( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test if discovery is preferred over referencing by type/subtype. @@ -437,21 +427,21 @@ async def test_if_discovery_id_is_prefered( # Fake short press, matching on type and subtype async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press, matching on discovery_id - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "long_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "long_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "long_press" async def test_non_unique_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -528,20 +518,20 @@ async def test_non_unique_triggers( # and triggers both attached instances. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 - all_calls = {calls[0].data["some"], calls[1].data["some"]} + assert len(service_calls) == 2 + all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Trigger second config references to same trigger # and triggers both attached instances. 
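# Note on the fixture change running through these hunks: each module's private
# `calls` fixture (shown removed above, built with `async_mock_service`) is being
# replaced by a shared `service_calls` fixture, which presumably records every
# service call made during a test in a single list; that is why the assertions
# switch to `service_calls`, clear it between phases, and use new counts/indexes.
# A minimal sketch of the underlying pattern, assuming the shared fixture records
# calls the same way the removed per-module fixture did; the name `recorded_calls`
# is illustrative only and is not the actual fixture used by the suite.
import pytest

from homeassistant.core import HomeAssistant, ServiceCall
from tests.common import async_mock_service


@pytest.fixture
def recorded_calls(hass: HomeAssistant) -> list[ServiceCall]:
    """Record calls made to the mocked test.automation service."""
    # async_mock_service registers a no-op handler for test.automation and
    # returns the list that every matching ServiceCall is appended to while
    # the test runs, so assertions can check len() and per-call data.
    return async_mock_service(hass, "test", "automation")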
async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - all_calls = {calls[0].data["some"], calls[1].data["some"]} + assert len(service_calls) == 2 + all_calls = {service_calls[0].data["some"], service_calls[1].data["some"]} assert all_calls == {"press1", "press2"} # Removing the first trigger will clean up - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") await hass.async_block_till_done() await hass.async_block_till_done() @@ -549,13 +539,13 @@ async def test_non_unique_triggers( "Device trigger ('device_automation', 'bla1') has been removed" in caplog.text ) async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_mqtt_message_template( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing with a message template and a shared topic.""" @@ -623,20 +613,20 @@ async def test_if_fires_on_mqtt_message_template( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"short_press"}') await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button4", '{"button":"long_press"}') await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers firing of MQTT device triggers discovered after setup.""" @@ -710,20 +700,20 @@ async def test_if_fires_on_mqtt_message_late_discover( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message(hass, "foobar/triggers/button2", "long_press") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "long_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "long_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -782,7 +772,7 @@ async def test_if_fires_on_mqtt_message_after_update( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with existing type/subtype change async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data1) @@ -793,29 +783,29 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with same topic async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data3) await hass.async_block_till_done() - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/button1", "") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 - calls.clear() + service_calls.clear() async_fire_mqtt_message(hass, "foobar/triggers/buttonOne", "") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_no_resubscribe_same_topic( @@ -868,7 +858,7 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -911,7 +901,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( # Fake short press. async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", "") @@ -919,7 +909,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Rediscover the trigger async_fire_mqtt_message(hass, "homeassistant/device_automation/bla1/config", data1) @@ -927,14 +917,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Test triggers not firing after removal.""" @@ -982,7 +972,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( # Fake short press. 
async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove MQTT from the device mqtt_config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -994,7 +984,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove( @@ -1684,7 +1674,7 @@ async def test_trigger_debug_info( async def test_unload_entry( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, ) -> None: @@ -1727,7 +1717,7 @@ async def test_unload_entry( # Fake short press 1 async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await help_test_unload_config_entry(hass) @@ -1736,7 +1726,7 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Start entry again mqtt_entry = hass.config_entries.async_entries("mqtt")[0] @@ -1747,4 +1737,4 @@ async def test_unload_entry( await hass.async_block_till_done() async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 8c51e295998..58de3c53c52 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -1363,24 +1363,29 @@ EXCLUDED_MODULES = { async def test_missing_discover_abbreviations( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, ) -> None: """Check MQTT platforms for missing abbreviations.""" await mqtt_mock_entry() - missing = [] + missing: list[str] = [] regex = re.compile(r"(CONF_[a-zA-Z\d_]*) *= *[\'\"]([a-zA-Z\d_]*)[\'\"]") - for fil in Path(mqtt.__file__).parent.rglob("*.py"): - if fil.name in EXCLUDED_MODULES: - continue - with open(fil, encoding="utf-8") as file: - matches = re.findall(regex, file.read()) - missing.extend( - f"{fil}: no abbreviation for {match[1]} ({match[0]})" - for match in matches - if match[1] not in ABBREVIATIONS.values() - and match[1] not in DEVICE_ABBREVIATIONS.values() - and match[0] not in ABBREVIATIONS_WHITE_LIST - ) + + def _add_missing(): + for fil in Path(mqtt.__file__).parent.rglob("*.py"): + if fil.name in EXCLUDED_MODULES: + continue + with open(fil, encoding="utf-8") as file: + matches = re.findall(regex, file.read()) + missing.extend( + f"{fil}: no abbreviation for {match[1]} ({match[0]})" + for match in matches + if match[1] not in ABBREVIATIONS.values() + and match[1] not in DEVICE_ABBREVIATIONS.values() + and match[0] not in ABBREVIATIONS_WHITE_LIST + ) + + await hass.async_add_executor_job(_add_missing) assert not missing diff --git a/tests/components/mqtt/test_event.py b/tests/components/mqtt/test_event.py index 662a279f639..3d4847a406a 100644 --- a/tests/components/mqtt/test_event.py +++ b/tests/components/mqtt/test_event.py @@ -372,11 +372,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await 
help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - event.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG ) @@ -387,11 +383,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - event.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, event.DOMAIN, DEFAULT_CONFIG ) diff --git a/tests/components/mqtt/test_fan.py b/tests/components/mqtt/test_fan.py index 2d1d717c58f..1d0cc809fd6 100644 --- a/tests/components/mqtt/test_fan.py +++ b/tests/components/mqtt/test_fan.py @@ -1590,7 +1590,7 @@ async def test_attributes( } }, True, - fan.FanEntityFeature(0), + fan.FanEntityFeature.TURN_OFF | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1605,7 +1605,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE, + fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1620,7 +1622,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1651,7 +1655,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1667,7 +1673,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1682,7 +1690,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1698,7 +1708,10 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.OSCILLATE | fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1714,7 +1727,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1730,7 +1745,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1747,7 +1764,10 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE | fan.FanEntityFeature.OSCILLATE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.OSCILLATE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1764,7 +1784,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.SET_SPEED, + fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, None, ), ( @@ -1831,7 +1853,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.PRESET_MODE, + fan.FanEntityFeature.PRESET_MODE + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, "some error", ), ( @@ -1846,7 +1870,9 @@ async def test_attributes( } }, True, - fan.FanEntityFeature.DIRECTION, + fan.FanEntityFeature.DIRECTION + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON, "some error", ), ], @@ -1932,11 +1958,7 @@ async def 
test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - fan.DOMAIN, - DEFAULT_CONFIG, - MQTT_FAN_ATTRIBUTES_BLOCKED, + hass, mqtt_mock_entry, fan.DOMAIN, DEFAULT_CONFIG, MQTT_FAN_ATTRIBUTES_BLOCKED ) @@ -1956,11 +1978,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - fan.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG ) @@ -1971,11 +1989,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - fan.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, fan.DOMAIN, DEFAULT_CONFIG ) diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 05180c17b2f..62de371af4b 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -1246,11 +1246,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG ) @@ -1261,11 +1257,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - humidifier.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, humidifier.DOMAIN, DEFAULT_CONFIG ) diff --git a/tests/components/mqtt/test_image.py b/tests/components/mqtt/test_image.py index bb029fba231..6f0eb8edf49 100644 --- a/tests/components/mqtt/test_image.py +++ b/tests/components/mqtt/test_image.py @@ -573,11 +573,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG ) @@ -588,11 +584,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - image.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, image.DOMAIN, DEFAULT_CONFIG ) diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 403f7974878..51379dc8508 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -260,10 +260,12 @@ async def test_service_call_without_topic_does_not_publish( assert not mqtt_mock.async_publish.called -async def test_service_call_with_topic_and_topic_template_does_not_publish( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_topic_and_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with topic/topic template. 
+ """Test the mqtt publish action call with topic/topic template. If both 'topic' and 'topic_template' are provided then fail. """ @@ -284,10 +286,12 @@ async def test_service_call_with_topic_and_topic_template_does_not_publish( assert not mqtt_mock.async_publish.called -async def test_service_call_with_invalid_topic_template_does_not_publish( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_action_call_with_invalid_topic_template_does_not_publish( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with a problematic topic template.""" + """Test the mqtt publish action call with a problematic topic template.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -307,10 +311,12 @@ async def test_service_call_with_invalid_topic_template_does_not_publish( assert not mqtt_mock.async_publish.called -async def test_service_call_with_template_topic_renders_template( +# The use of a topic_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_template_topic_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered topic template. + """Test the mqtt publish action call with rendered topic template. If 'topic_template' is provided and 'topic' is not, then render it. """ @@ -331,7 +337,7 @@ async def test_service_call_with_template_topic_renders_template( async def test_service_call_with_template_topic_renders_invalid_topic( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered, invalid topic template. + """Test the action call with rendered, invalid topic template. If a wildcard topic is rendered, then fail. """ @@ -354,10 +360,12 @@ async def test_service_call_with_template_topic_renders_invalid_topic( assert not mqtt_mock.async_publish.called -async def test_service_call_with_invalid_rendered_template_topic_doesnt_render_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_action_call_with_invalid_rendered_payload_template_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with unrendered template. + """Test the action call with unrendered payload template. If both 'payload' and 'payload_template' are provided then fail. """ @@ -378,10 +386,12 @@ async def test_service_call_with_invalid_rendered_template_topic_doesnt_render_t assert not mqtt_mock.async_publish.called -async def test_service_call_with_template_payload_renders_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_mqtt_publish_action_call_with_template_payload_renders_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with rendered template. + """Test the mqtt publish action call with rendered template. If 'payload_template' is provided and 'payload' is not, then render it. 
""" @@ -410,10 +420,12 @@ async def test_service_call_with_template_payload_renders_template( mqtt_mock.reset_mock() -async def test_service_call_with_bad_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_publish_action_call_with_bad_payload_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with a bad template does not publish.""" + """Test the mqtt publish action call with a bad template does not publish.""" mqtt_mock = await mqtt_mock_entry() with pytest.raises(MqttCommandTemplateException) as exc: await hass.services.async_call( @@ -432,10 +444,12 @@ async def test_service_call_with_bad_template( ) -async def test_service_call_with_payload_doesnt_render_template( +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 +async def test_action_call_with_payload_doesnt_render_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the service call with unrendered template. + """Test the mqtt publish action call with an unrendered template. If both 'payload' and 'payload_template' are provided then fail. """ @@ -1626,10 +1640,12 @@ async def test_debug_info_qos_retain( } in messages +# The use of a payload_template in an mqtt publish action call +# has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_publish_json_from_template( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: - """Test the publishing of call to services.""" + """Test the publishing of call to mqtt publish action.""" mqtt_mock = await mqtt_mock_entry() test_str = "{'valid': 'python', 'invalid': 'json'}" diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 120a09deb88..4906f6cfda3 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -91,8 +91,7 @@ DEFAULT_CONFIG = { ], ) async def test_run_lawn_mower_setup_and_state_updates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test that it sets up correctly fetches the given payload.""" await mqtt_mock_entry() @@ -442,11 +441,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG ) @@ -457,11 +452,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - lawn_mower.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lawn_mower.DOMAIN, DEFAULT_CONFIG ) @@ -511,8 +502,7 @@ async def test_discovery_removal_lawn_mower( async def test_discovery_update_lawn_mower( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered lawn_mower.""" config1 = { @@ -771,8 +761,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - 
mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = lawn_mower.DOMAIN @@ -826,8 +815,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = lawn_mower.DOMAIN diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py index e4f5e3cd481..9b45b65d2cc 100644 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ b/tests/components/mqtt/test_legacy_vacuum.py @@ -23,7 +23,7 @@ DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} [ ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, False), + ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, True), ], ) async def test_removed_support_yaml( @@ -39,8 +39,8 @@ async def test_removed_support_yaml( if removed: assert entity is None assert ( - "The support for the `legacy` MQTT " - "vacuum schema has been removed" in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) else: assert entity is not None @@ -51,7 +51,7 @@ async def test_removed_support_yaml( [ ({"name": "test", "schema": "legacy"}, True), ({"name": "test"}, False), - ({"name": "test", "schema": "state"}, False), + ({"name": "test", "schema": "state"}, True), ], ) async def test_removed_support_discovery( @@ -69,12 +69,15 @@ async def test_removed_support_discovery( await hass.async_block_till_done() entity = hass.states.get("vacuum.test") + assert entity is not None if removed: - assert entity is None assert ( - "The support for the `legacy` MQTT " - "vacuum schema has been removed" in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) else: - assert entity is not None + assert ( + "The 'schema' option has been removed, " + "please remove it from your configuration" not in caplog.text + ) diff --git a/tests/components/mqtt/test_light.py b/tests/components/mqtt/test_light.py index bfce49b9ecb..18815281f63 100644 --- a/tests/components/mqtt/test_light.py +++ b/tests/components/mqtt/test_light.py @@ -2492,11 +2492,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2507,11 +2503,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2567,9 +2559,7 @@ async def test_discovery_removal_light( async def test_discovery_ignores_extra_keys( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test discovery ignores extra keys that are not blocked.""" await mqtt_mock_entry() @@ -3295,8 +3285,7 @@ 
async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -3378,7 +3367,6 @@ async def test_encoding_subscribable_topics( async def test_encoding_subscribable_topics_brightness( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, topic: str, value: str, attribute: str, @@ -3590,8 +3578,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_light_json.py b/tests/components/mqtt/test_light_json.py index 5ab2a32dc83..829222e0304 100644 --- a/tests/components/mqtt/test_light_json.py +++ b/tests/components/mqtt/test_light_json.py @@ -185,7 +185,6 @@ class JsonValidator: "hass_config", [{mqtt.DOMAIN: {light.DOMAIN: {"schema": "json", "name": "test"}}}] ) async def test_fail_setup_if_no_command_topic( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -204,7 +203,6 @@ async def test_fail_setup_if_no_command_topic( ], ) async def test_fail_setup_if_color_mode_deprecated( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -233,7 +231,6 @@ async def test_fail_setup_if_color_mode_deprecated( ids=["color_temp", "hs", "rgb", "xy", "color_temp, rgb"], ) async def test_warning_if_color_mode_flags_are_used( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, color_modes: tuple[str, ...], @@ -316,7 +313,6 @@ async def test_warning_on_discovery_if_color_mode_flags_are_used( ids=["color_temp"], ) async def test_warning_if_color_mode_option_flag_is_used( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -393,7 +389,6 @@ async def test_warning_on_discovery_if_color_mode_option_flag_is_used( ], ) async def test_fail_setup_if_color_modes_invalid( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, error: str, @@ -421,8 +416,7 @@ async def test_fail_setup_if_color_modes_invalid( ], ) async def test_single_color_mode( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup with single color_mode.""" await mqtt_mock_entry() @@ -448,8 +442,7 @@ async def test_single_color_mode( @pytest.mark.parametrize("hass_config", [COLOR_MODES_CONFIG]) async def test_turn_on_with_unknown_color_mode_optimistic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await mqtt_mock_entry() @@ -486,8 +479,7 @@ async def test_turn_on_with_unknown_color_mode_optimistic( ], ) async def test_controlling_state_with_unknown_color_mode( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setup and turn with unknown color_mode in optimistic mode.""" await 
mqtt_mock_entry() @@ -2374,11 +2366,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2389,11 +2377,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -2666,8 +2650,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_light_template.py b/tests/components/mqtt/test_light_template.py index aace09f402a..d570454a6bf 100644 --- a/tests/components/mqtt/test_light_template.py +++ b/tests/components/mqtt/test_light_template.py @@ -978,11 +978,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -993,11 +989,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - light.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, light.DOMAIN, DEFAULT_CONFIG ) @@ -1288,8 +1280,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = light.DOMAIN @@ -1343,8 +1334,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = light.DOMAIN diff --git a/tests/components/mqtt/test_lock.py b/tests/components/mqtt/test_lock.py index c9546bdfdb3..331f21a0a7c 100644 --- a/tests/components/mqtt/test_lock.py +++ b/tests/components/mqtt/test_lock.py @@ -757,11 +757,7 @@ async def test_setting_blocked_attribute_via_mqtt_json_message( ) -> None: """Test the setting of attribute via MQTT with JSON payload.""" await help_test_setting_blocked_attribute_via_mqtt_json_message( - hass, - mqtt_mock_entry, - lock.DOMAIN, - DEFAULT_CONFIG, - MQTT_LOCK_ATTRIBUTES_BLOCKED, + hass, mqtt_mock_entry, lock.DOMAIN, DEFAULT_CONFIG, MQTT_LOCK_ATTRIBUTES_BLOCKED ) @@ -781,11 +777,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - lock.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG ) @@ -796,11 +788,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, 
- mqtt_mock_entry, - caplog, - lock.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, lock.DOMAIN, DEFAULT_CONFIG ) @@ -1008,8 +996,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = lock.DOMAIN @@ -1059,8 +1046,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = lock.DOMAIN diff --git a/tests/components/mqtt/test_mixins.py b/tests/components/mqtt/test_mixins.py index ae4d232ba54..5b7984cad62 100644 --- a/tests/components/mqtt/test_mixins.py +++ b/tests/components/mqtt/test_mixins.py @@ -15,7 +15,7 @@ from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.helpers import device_registry as dr, issue_registry as ir from tests.common import MockConfigEntry, async_capture_events, async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient +from tests.typing import MqttMockHAClientGenerator @pytest.mark.parametrize( @@ -37,8 +37,7 @@ from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient ], ) async def test_availability_with_shared_state_topic( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the state is not changed twice. @@ -295,11 +294,10 @@ async def test_availability_with_shared_state_topic( ], ) @patch("homeassistant.components.mqtt.client.DISCOVERY_COOLDOWN", 0.0) +@pytest.mark.usefixtures("mqtt_client_mock") async def test_default_entity_and_device_name( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mqtt_client_mock: MqttMockPahoClient, - mqtt_config_entry_data, caplog: pytest.LogCaptureFixture, entity_id: str, friendly_name: str, @@ -341,8 +339,7 @@ async def test_default_entity_and_device_name( async def test_name_attribute_is_set_or_not( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test frendly name with device_class set. 
diff --git a/tests/components/mqtt/test_notify.py b/tests/components/mqtt/test_notify.py index 540dbbafd99..4837ee214c4 100644 --- a/tests/components/mqtt/test_notify.py +++ b/tests/components/mqtt/test_notify.py @@ -199,11 +199,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG ) @@ -214,11 +210,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - notify.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, notify.DOMAIN, DEFAULT_CONFIG ) @@ -435,8 +427,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = notify.DOMAIN diff --git a/tests/components/mqtt/test_number.py b/tests/components/mqtt/test_number.py index 2cd5c5390f5..44652681fc3 100644 --- a/tests/components/mqtt/test_number.py +++ b/tests/components/mqtt/test_number.py @@ -557,11 +557,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG ) @@ -572,11 +568,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - number.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, number.DOMAIN, DEFAULT_CONFIG ) @@ -791,7 +783,6 @@ async def test_min_max_step_attributes( ], ) async def test_invalid_min_max_attributes( - hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture, ) -> None: @@ -871,7 +862,7 @@ async def test_default_mode( async def test_mode( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, - mode, + mode: str, ) -> None: """Test mode.""" await mqtt_mock_entry() @@ -1030,8 +1021,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = number.DOMAIN @@ -1082,8 +1072,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = number.DOMAIN diff --git a/tests/components/mqtt/test_scene.py b/tests/components/mqtt/test_scene.py index 9badd6aeee0..d78dbe5c003 100644 --- a/tests/components/mqtt/test_scene.py +++ b/tests/components/mqtt/test_scene.py @@ -263,11 +263,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, 
scene.DOMAIN, DEFAULT_CONFIG ) @@ -278,11 +274,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - scene.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, scene.DOMAIN, DEFAULT_CONFIG ) @@ -390,8 +382,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = scene.DOMAIN @@ -414,8 +405,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = scene.DOMAIN diff --git a/tests/components/mqtt/test_select.py b/tests/components/mqtt/test_select.py index 26a64d70fee..60eb4893760 100644 --- a/tests/components/mqtt/test_select.py +++ b/tests/components/mqtt/test_select.py @@ -67,9 +67,7 @@ DEFAULT_CONFIG = { } -def _test_run_select_setup_params( - topic: str, -) -> Generator[tuple[ConfigType, str], None]: +def _test_run_select_setup_params(topic: str) -> Generator[tuple[ConfigType, str]]: yield ( { mqtt.DOMAIN: { @@ -407,11 +405,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - select.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, select.DOMAIN, DEFAULT_CONFIG ) @@ -597,7 +591,7 @@ async def test_entity_debug_info_message( def _test_options_attributes_options_config( request: tuple[list[str]], -) -> Generator[tuple[ConfigType, list[str]], None]: +) -> Generator[tuple[ConfigType, list[str]]]: for option in request: yield ( { @@ -619,9 +613,7 @@ def _test_options_attributes_options_config( _test_options_attributes_options_config((["milk", "beer"], ["milk"], [])), ) async def test_options_attributes( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - options: list[str], + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, options: list[str] ) -> None: """Test options attribute.""" await mqtt_mock_entry() @@ -705,8 +697,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = select.DOMAIN @@ -759,8 +750,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = select.DOMAIN diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 94eb049dda7..4b117aaa4d5 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -617,9 +617,7 @@ async def test_setting_sensor_last_reset_via_mqtt_json_message( ], ) async def test_setting_sensor_last_reset_via_mqtt_json_message_2( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> 
None: """Test the setting of the value via MQTT with JSON payload.""" await hass.async_block_till_done() @@ -810,9 +808,7 @@ async def test_discovery_update_availability( ], ) async def test_invalid_device_class( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test device_class option with invalid value.""" assert await mqtt_mock_entry() @@ -871,9 +867,7 @@ async def test_valid_device_class_and_uom( ], ) async def test_invalid_state_class( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test state_class option with invalid value.""" assert await mqtt_mock_entry() @@ -954,11 +948,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG ) @@ -969,11 +959,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - sensor.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, sensor.DOMAIN, DEFAULT_CONFIG ) @@ -1298,8 +1284,7 @@ async def test_value_template_with_entity_id( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = sensor.DOMAIN @@ -1454,8 +1439,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = sensor.DOMAIN diff --git a/tests/components/mqtt/test_siren.py b/tests/components/mqtt/test_siren.py index c32c57d4f02..3f720e3ee3c 100644 --- a/tests/components/mqtt/test_siren.py +++ b/tests/components/mqtt/test_siren.py @@ -60,9 +60,7 @@ DEFAULT_CONFIG = { async def async_turn_on( - hass: HomeAssistant, - entity_id: str, - parameters: dict[str, Any], + hass: HomeAssistant, entity_id: str, parameters: dict[str, Any] ) -> None: """Turn all or specified siren on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} @@ -180,9 +178,7 @@ async def test_sending_mqtt_commands_and_optimistic( ], ) async def test_controlling_state_via_topic_and_json_message( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the controlling state via topic and JSON message.""" await mqtt_mock_entry() @@ -618,11 +614,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - siren.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG ) @@ -633,11 +625,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - 
mqtt_mock_entry, - caplog, - siren.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, siren.DOMAIN, DEFAULT_CONFIG ) @@ -787,8 +775,7 @@ async def test_discovery_update_siren_template( ], ) async def test_command_templates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test siren with command templates optimistic.""" mqtt_mock = await mqtt_mock_entry() @@ -1005,8 +992,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = siren.DOMAIN @@ -1016,9 +1002,7 @@ async def test_reloadable( @pytest.mark.parametrize( ("topic", "value", "attribute", "attribute_value"), - [ - ("state_topic", "ON", None, "on"), - ], + [("state_topic", "ON", None, "on")], ) async def test_encoding_subscribable_topics( hass: HomeAssistant, @@ -1056,8 +1040,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = siren.DOMAIN diff --git a/tests/components/mqtt/test_subscription.py b/tests/components/mqtt/test_subscription.py index 7247458a667..86279b2006c 100644 --- a/tests/components/mqtt/test_subscription.py +++ b/tests/components/mqtt/test_subscription.py @@ -2,8 +2,6 @@ from unittest.mock import ANY -import pytest - from homeassistant.components.mqtt.subscription import ( async_prepare_subscribe_topics, async_subscribe_topics, @@ -16,9 +14,7 @@ from tests.typing import MqttMockHAClientGenerator async def test_subscribe_topics( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test subscription to topics.""" await mqtt_mock_entry() @@ -69,9 +65,7 @@ async def test_subscribe_topics( async def test_modify_topics( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test modification of topics.""" await mqtt_mock_entry() @@ -136,9 +130,7 @@ async def test_modify_topics( async def test_qos_encoding_default( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test default qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -158,9 +150,7 @@ async def test_qos_encoding_default( async def test_qos_encoding_custom( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test custom qos and encoding.""" mqtt_mock = await mqtt_mock_entry() @@ -187,9 +177,7 @@ async def test_qos_encoding_custom( async def test_no_change( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test subscription to topics without change.""" mqtt_mock = await mqtt_mock_entry() diff --git a/tests/components/mqtt/test_switch.py b/tests/components/mqtt/test_switch.py index 
42d2e092d83..fddbfd8fbe2 100644 --- a/tests/components/mqtt/test_switch.py +++ b/tests/components/mqtt/test_switch.py @@ -191,6 +191,50 @@ async def test_sending_inital_state_and_optimistic( assert state.attributes.get(ATTR_ASSUMED_STATE) +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + switch.DOMAIN: { + "name": "test", + "command_topic": "command-topic", + "command_template": '{"state": "{{ value }}"}', + "payload_on": "beer on", + "payload_off": "beer off", + "qos": "2", + } + } + } + ], +) +async def test_sending_mqtt_commands_with_command_template( + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator +) -> None: + """Test sending MQTT commands using a command template.""" + fake_state = State("switch.test", "on") + mock_restore_cache(hass, (fake_state,)) + + mqtt_mock = await mqtt_mock_entry() + + state = hass.states.get("switch.test") + assert state.state == STATE_ON + assert state.attributes.get(ATTR_ASSUMED_STATE) + + await common.async_turn_on(hass, "switch.test") + + mqtt_mock.async_publish.assert_called_once_with( + "command-topic", '{"state": "beer on"}', 2, False + ) + mqtt_mock.async_publish.reset_mock() + + await common.async_turn_off(hass, "switch.test") + + mqtt_mock.async_publish.assert_called_once_with( + "command-topic", '{"state": "beer off"}', 2, False + ) + + @pytest.mark.parametrize( "hass_config", [ @@ -379,11 +423,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - switch.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG ) @@ -394,11 +434,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - switch.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, switch.DOMAIN, DEFAULT_CONFIG ) @@ -524,8 +560,7 @@ async def test_discovery_update_switch_template( async def test_discovery_update_unchanged_switch( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered switch.""" data1 = ( @@ -675,8 +710,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = switch.DOMAIN @@ -726,8 +760,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = switch.DOMAIN diff --git a/tests/components/mqtt/test_tag.py b/tests/components/mqtt/test_tag.py index 0d0765258f2..adebd157588 100644 --- a/tests/components/mqtt/test_tag.py +++ b/tests/components/mqtt/test_tag.py @@ -1,11 +1,11 @@ """The tests for MQTT tag scanner.""" +from collections.abc import Generator import copy import json from unittest.mock import ANY, AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.mqtt.const import DOMAIN as MQTT_DOMAIN @@ -20,7 +20,7 @@ from tests.common import (
async_fire_mqtt_message, async_get_device_automations, ) -from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator +from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator DEFAULT_CONFIG_DEVICE = { "device": {"identifiers": ["0AFFD2"]}, @@ -102,9 +102,7 @@ async def test_if_fires_on_mqtt_message_with_device( async def test_if_fires_on_mqtt_message_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning, without device.""" await mqtt_mock_entry() @@ -140,9 +138,7 @@ async def test_if_fires_on_mqtt_message_with_template( async def test_strip_tag_id( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test strip whitespace from tag_id.""" await mqtt_mock_entry() @@ -208,9 +204,7 @@ async def test_if_fires_on_mqtt_message_after_update_with_device( async def test_if_fires_on_mqtt_message_after_update_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning after update.""" await mqtt_mock_entry() @@ -359,9 +353,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_with_device( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_without_device( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - tag_mock: AsyncMock, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, tag_mock: AsyncMock ) -> None: """Test tag scanning not firing after removal.""" await mqtt_mock_entry() @@ -904,11 +896,9 @@ async def test_update_with_bad_config_not_breaks_discovery( tag_mock.assert_called_once_with(ANY, "12345", ANY) +@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - tag_mock: AsyncMock, + hass: HomeAssistant, device_registry: dr.DeviceRegistry, tag_mock: AsyncMock ) -> None: """Test unloading the MQTT entry.""" @@ -934,12 +924,9 @@ async def test_unload_entry( tag_mock.assert_not_called() +@pytest.mark.usefixtures("mqtt_mock", "tag_mock") async def test_value_template_fails( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, - tag_mock: AsyncMock, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test the rendering of MQTT value template fails.""" config = copy.deepcopy(DEFAULT_CONFIG_DEVICE) diff --git a/tests/components/mqtt/test_text.py b/tests/components/mqtt/test_text.py index fc714efa513..ebcb835844d 100644 --- a/tests/components/mqtt/test_text.py +++ b/tests/components/mqtt/test_text.py @@ -251,9 +251,7 @@ async def test_controlling_validation_state_via_topic( ], ) async def test_attribute_validation_max_greater_then_min( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the validation of min and max configuration attributes.""" assert await mqtt_mock_entry() @@ -276,9 +274,7 @@ async def test_attribute_validation_max_greater_then_min( ], ) async def 
test_attribute_validation_max_not_greater_then_max_state_length( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the max value of of max configuration attribute.""" assert await mqtt_mock_entry() @@ -436,13 +432,7 @@ async def test_default_availability_payload( } } await help_test_default_availability_payload( - hass, - mqtt_mock_entry, - text.DOMAIN, - config, - True, - "state-topic", - "some state", + hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "some state" ) @@ -461,13 +451,7 @@ async def test_custom_availability_payload( } await help_test_custom_availability_payload( - hass, - mqtt_mock_entry, - text.DOMAIN, - config, - True, - "state-topic", - "1", + hass, mqtt_mock_entry, text.DOMAIN, config, True, "state-topic", "1" ) @@ -505,11 +489,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG ) @@ -520,11 +500,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - text.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, text.DOMAIN, DEFAULT_CONFIG ) @@ -754,8 +730,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = text.DOMAIN @@ -805,8 +780,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = text.DOMAIN diff --git a/tests/components/mqtt/test_trigger.py b/tests/components/mqtt/test_trigger.py index 2e0506a02ab..5bf36849b13 100644 --- a/tests/components/mqtt/test_trigger.py +++ b/tests/components/mqtt/test_trigger.py @@ -9,7 +9,7 @@ from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_O from homeassistant.core import HassJobType, HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_fire_mqtt_message, async_mock_service, mock_component +from tests.common import async_fire_mqtt_message, mock_component from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator @@ -18,12 +18,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) async def setup_comp( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator @@ -34,7 +28,7 @@ async def setup_comp( async def test_if_fires_on_topic_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic match.""" assert await async_setup_component( @@ -57,9 +51,10 @@ async def 
test_if_fires_on_topic_match( async_fire_mqtt_message(hass, "test-topic", '{ "hello": "world" }') await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == 'mqtt - test-topic - { "hello": "world" } - world - 0' + service_calls[0].data["some"] + == 'mqtt - test-topic - { "hello": "world" } - world - 0' ) await hass.services.async_call( @@ -68,13 +63,15 @@ async def test_if_fires_on_topic_match( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 + async_fire_mqtt_message(hass, "test-topic", "test_payload") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_topic_and_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match.""" assert await async_setup_component( @@ -94,11 +91,11 @@ async def test_if_fires_on_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_topic_and_payload_match2( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on topic and payload match. @@ -121,11 +118,11 @@ async def test_if_fires_on_topic_and_payload_match2( async_fire_mqtt_message(hass, "test-topic", "0") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_templated_topic_and_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -145,19 +142,19 @@ async def test_if_fires_on_templated_topic_and_payload_match( async_fire_mqtt_message(hass, "test-topic-", "foo") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "foo") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic-4", "bar") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_fires_on_payload_template( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is fired on templated topic and payload match.""" assert await async_setup_component( @@ -178,19 +175,21 @@ async def test_if_fires_on_payload_template( async_fire_mqtt_message(hass, "test-topic", "hello") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"unwanted_key":"hello"}') await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, "test-topic", '{"wanted_key":"hello"}') await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_non_allowed_templates( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + service_calls: list[ServiceCall], + caplog: pytest.LogCaptureFixture, ) -> None: """Test non allowed function in template.""" assert await async_setup_component( @@ 
-214,7 +213,7 @@ async def test_non_allowed_templates( async def test_if_not_fires_on_topic_but_no_payload_match( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test if message is not fired on topic but no payload.""" assert await async_setup_component( @@ -234,11 +233,11 @@ async def test_if_not_fires_on_topic_but_no_payload_match( async_fire_mqtt_message(hass, "test-topic", "no-hello") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_encoding_default( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp + hass: HomeAssistant, service_calls: list[ServiceCall], setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( @@ -258,7 +257,7 @@ async def test_encoding_default( async def test_encoding_custom( - hass: HomeAssistant, calls: list[ServiceCall], setup_comp + hass: HomeAssistant, service_calls: list[ServiceCall], setup_comp ) -> None: """Test default encoding.""" assert await async_setup_component( diff --git a/tests/components/mqtt/test_update.py b/tests/components/mqtt/test_update.py index bb9ae12c66b..937b8cdebd0 100644 --- a/tests/components/mqtt/test_update.py +++ b/tests/components/mqtt/test_update.py @@ -504,11 +504,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG ) @@ -519,11 +515,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - update.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, update.DOMAIN, DEFAULT_CONFIG ) @@ -679,8 +671,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = update.DOMAIN @@ -691,8 +682,7 @@ async def test_unload_entry( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = update.DOMAIN diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 8c01138ccb9..7fc4ff981fd 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -119,16 +119,13 @@ async def test_warning_schema_option( await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("vacuum.test") + # We do not fail if the schema option is still in the payload, but we log an error assert state is not None with caplog.at_level(logging.WARNING): assert ( - "The `schema` option is deprecated for MQTT vacuum, but it was used in a " - "discovery payload. Please contact the maintainer of the integration or " - "service that supplies the config, and suggest to remove the option." 
- in caplog.text + "The 'schema' option has been removed, " + "please remove it from your configuration" in caplog.text ) - assert "https://example.com/support" in caplog.text - assert "at discovery topic homeassistant/vacuum/bla/config" in caplog.text @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) @@ -507,11 +504,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - DEFAULT_CONFIG_2, + hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 ) @@ -522,11 +515,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - vacuum.DOMAIN, - DEFAULT_CONFIG_2, + hass, mqtt_mock_entry, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2 ) @@ -682,20 +671,8 @@ async def test_entity_debug_info_message( @pytest.mark.parametrize( ("service", "topic", "parameters", "payload", "template"), [ - ( - vacuum.SERVICE_START, - "command_topic", - None, - "start", - None, - ), - ( - vacuum.SERVICE_CLEAN_SPOT, - "command_topic", - None, - "clean_spot", - None, - ), + (vacuum.SERVICE_START, "command_topic", None, "start", None), + (vacuum.SERVICE_CLEAN_SPOT, "command_topic", None, "clean_spot", None), ( vacuum.SERVICE_SET_FAN_SPEED, "set_fan_speed_topic", @@ -710,13 +687,7 @@ async def test_entity_debug_info_message( "custom command", None, ), - ( - vacuum.SERVICE_STOP, - "command_topic", - None, - "stop", - None, - ), + (vacuum.SERVICE_STOP, "command_topic", None, "stop", None), ], ) async def test_publishing_with_custom_encoding( @@ -760,8 +731,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = vacuum.DOMAIN diff --git a/tests/components/mqtt/test_valve.py b/tests/components/mqtt/test_valve.py index 6f88e160b73..53a7190eaf3 100644 --- a/tests/components/mqtt/test_valve.py +++ b/tests/components/mqtt/test_valve.py @@ -306,8 +306,7 @@ async def test_state_via_state_topic_through_position( ], ) async def test_opening_closing_state_is_reset( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test the controlling state via topic through position. 
@@ -734,11 +733,7 @@ async def test_controlling_valve_by_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [ - (0, "0"), - (30, "30"), - (100, "100"), - ], + [(0, "0"), (30, "30"), (100, "100")], ) async def test_controlling_valve_by_set_valve_position( hass: HomeAssistant, @@ -842,12 +837,7 @@ async def test_controlling_valve_optimistic_by_set_valve_position( ) @pytest.mark.parametrize( ("position", "asserted_message"), - [ - (0, "-128"), - (30, "-52"), - (80, "76"), - (100, "127"), - ], + [(0, "-128"), (30, "-52"), (80, "76"), (100, "127")], ) async def test_controlling_valve_with_alt_range_by_set_valve_position( hass: HomeAssistant, @@ -1127,9 +1117,7 @@ async def test_valid_device_class( ], ) async def test_invalid_device_class( - hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - mqtt_mock_entry: MqttMockHAClientGenerator, + mqtt_mock_entry: MqttMockHAClientGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test the setting of an invalid device class.""" assert await mqtt_mock_entry() @@ -1174,11 +1162,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG ) @@ -1189,17 +1173,12 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - valve.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, valve.DOMAIN, DEFAULT_CONFIG ) async def test_discovery_update_attr( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test update of discovered MQTTAttributes.""" await help_test_discovery_update_attr( @@ -1386,8 +1365,7 @@ async def test_publishing_with_custom_encoding( async def test_reloadable( - hass: HomeAssistant, - mqtt_client_mock: MqttMockPahoClient, + hass: HomeAssistant, mqtt_client_mock: MqttMockPahoClient ) -> None: """Test reloading the MQTT platform.""" domain = valve.DOMAIN @@ -1439,8 +1417,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = valve.DOMAIN diff --git a/tests/components/mqtt/test_water_heater.py b/tests/components/mqtt/test_water_heater.py index 849a1ac8785..7bab4a5e233 100644 --- a/tests/components/mqtt/test_water_heater.py +++ b/tests/components/mqtt/test_water_heater.py @@ -141,7 +141,7 @@ async def test_get_operation_modes( await mqtt_mock_entry() state = hass.states.get(ENTITY_WATER_HEATER) - assert [ + assert state.attributes.get("operation_list") == [ STATE_ECO, STATE_ELECTRIC, STATE_GAS, @@ -149,14 +149,12 @@ async def test_get_operation_modes( STATE_HIGH_DEMAND, STATE_PERFORMANCE, STATE_OFF, - ] == state.attributes.get("operation_list") + ] @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_set_operation_mode_bad_attr_and_state( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting operation mode without required attribute.""" await 
mqtt_mock_entry() @@ -615,8 +613,7 @@ async def test_get_with_templates( ], ) async def test_set_and_templates( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test setting various attributes with templates.""" mqtt_mock = await mqtt_mock_entry() @@ -834,11 +831,7 @@ async def test_update_with_json_attrs_not_dict( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_not_dict( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG ) @@ -849,11 +842,7 @@ async def test_update_with_json_attrs_bad_json( ) -> None: """Test attributes get extracted from a JSON result.""" await help_test_update_with_json_attrs_bad_json( - hass, - mqtt_mock_entry, - caplog, - water_heater.DOMAIN, - DEFAULT_CONFIG, + hass, mqtt_mock_entry, caplog, water_heater.DOMAIN, DEFAULT_CONFIG ) @@ -1020,11 +1009,7 @@ async def test_entity_id_update_subscriptions( } } await help_test_entity_id_update_subscriptions( - hass, - mqtt_mock_entry, - water_heater.DOMAIN, - config, - ["test-topic", "avty-topic"], + hass, mqtt_mock_entry, water_heater.DOMAIN, config, ["test-topic", "avty-topic"] ) @@ -1200,8 +1185,7 @@ async def test_setup_manual_entity_from_yaml( async def test_unload_entry( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, + hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator ) -> None: """Test unloading the config entry.""" domain = water_heater.DOMAIN diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index a992c985057..36073c11a5d 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -1,12 +1,12 @@ """The tests for the JSON MQTT device tracker platform.""" +from collections.abc import AsyncGenerator import json import logging import os from unittest.mock import patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components.device_tracker.legacy import ( DOMAIN as DT_DOMAIN, diff --git a/tests/components/mysensors/conftest.py b/tests/components/mysensors/conftest.py index f1b86c9ce5b..b6fce35a4c7 100644 --- a/tests/components/mysensors/conftest.py +++ b/tests/components/mysensors/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import AsyncGenerator, Callable, Generator from copy import deepcopy import json from typing import Any @@ -12,7 +12,6 @@ from mysensors import BaseSyncGateway from mysensors.persistence import MySensorsJSONDecoder from mysensors.sensor import Sensor import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.components.mysensors.config_flow import DEFAULT_BAUD_RATE diff --git a/tests/components/mystrom/conftest.py b/tests/components/mystrom/conftest.py index f5405055805..af8d80ed27e 100644 --- a/tests/components/mystrom/conftest.py +++ b/tests/components/mystrom/conftest.py @@ -1,9 +1,9 @@ """Provide common mystrom fixtures and mocks.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.mystrom.const import DOMAIN from homeassistant.const import CONF_HOST diff --git 
a/tests/components/myuplink/conftest.py b/tests/components/myuplink/conftest.py index dd05bedcaf4..9ede11146ef 100644 --- a/tests/components/myuplink/conftest.py +++ b/tests/components/myuplink/conftest.py @@ -1,5 +1,6 @@ """Test helpers for myuplink.""" +from collections.abc import AsyncGenerator, Generator import time from typing import Any from unittest.mock import MagicMock, patch @@ -7,7 +8,6 @@ from unittest.mock import MagicMock, patch from myuplink import Device, DevicePoint, System import orjson import pytest -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/myuplink/fixtures/device_points_nibe_f730.json b/tests/components/myuplink/fixtures/device_points_nibe_f730.json index 49340bd9e26..9ec5db0ea3b 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_f730.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_f730.json @@ -951,5 +951,43 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": "economy", + "smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json index b64869c236c..9135862d991 100644 --- a/tests/components/myuplink/fixtures/device_points_nibe_smo20.json +++ b/tests/components/myuplink/fixtures/device_points_nibe_smo20.json @@ -3996,9 +3996,9 @@ "parameterUnit": "", "writable": true, "timestamp": "2024-02-14T08:36:05+00:00", - "value": 0, + "value": 0.0, "strVal": "economy", - "smartHomeCategories": [], + "smartHomeCategories": ["test"], "minValue": null, "maxValue": null, "stepValue": 1, diff --git a/tests/components/myuplink/snapshots/test_diagnostics.ambr b/tests/components/myuplink/snapshots/test_diagnostics.ambr index 53664820364..9160fd3b365 100644 --- a/tests/components/myuplink/snapshots/test_diagnostics.ambr +++ b/tests/components/myuplink/snapshots/test_diagnostics.ambr @@ -1012,6 +1012,44 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": "economy", + "smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] @@ -2017,6 +2055,44 @@ ], "scaleValue": "1", "zoneId": null + }, + { + "category": "NIBEF F730 CU 3x400V", + "parameterId": "47041", + "parameterName": "comfort mode", + "parameterUnit": "", + "writable": true, + "timestamp": "2024-05-22T15:02:03+00:00", + "value": 0, + "strVal": "economy", + 
"smartHomeCategories": [], + "minValue": null, + "maxValue": null, + "stepValue": 1, + "enumValues": [ + { + "value": "4", + "text": "smart control", + "icon": "" + }, + { + "value": "0", + "text": "economy", + "icon": "" + }, + { + "value": "1", + "text": "normal", + "icon": "" + }, + { + "value": "2", + "text": "luxury", + "icon": "" + } + ], + "scaleValue": "1", + "zoneId": null } ] diff --git a/tests/components/myuplink/test_select.py b/tests/components/myuplink/test_select.py new file mode 100644 index 00000000000..7ad2d17cb5d --- /dev/null +++ b/tests/components/myuplink/test_select.py @@ -0,0 +1,89 @@ +"""Tests for myuplink select module.""" + +from unittest.mock import MagicMock + +from aiohttp import ClientError +import pytest + +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_OPTION, + SERVICE_SELECT_OPTION, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +TEST_PLATFORM = Platform.SELECT +pytestmark = pytest.mark.parametrize("platforms", [(TEST_PLATFORM,)]) + +ENTITY_ID = "select.gotham_city_comfort_mode" +ENTITY_FRIENDLY_NAME = "Gotham City comfort mode" +ENTITY_UID = "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47041" + + +async def test_select_entity( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test that the entities are registered in the entity registry.""" + + entry = entity_registry.async_get(ENTITY_ID) + assert entry.unique_id == ENTITY_UID + + # Test the select attributes are correct. + + state = hass.states.get(ENTITY_ID) + assert state.state == "Economy" + assert state.attributes == { + "options": ["Smart control", "Economy", "Normal", "Luxury"], + "friendly_name": ENTITY_FRIENDLY_NAME, + } + + +async def test_selecting( + hass: HomeAssistant, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test select option service.""" + + await hass.services.async_call( + TEST_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, + blocking=True, + ) + await hass.async_block_till_done() + mock_myuplink_client.async_set_device_points.assert_called_once() + + # Test handling of exception from API. 
+ + mock_myuplink_client.async_set_device_points.side_effect = ClientError + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + TEST_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_OPTION: "Economy"}, + blocking=True, + ) + assert mock_myuplink_client.async_set_device_points.call_count == 2 + + +@pytest.mark.parametrize( + "load_device_points_file", + ["device_points_nibe_smo20.json"], +) +async def test_entity_registry_smo20( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_myuplink_client: MagicMock, + setup_platform: None, +) -> None: + """Test that the entities are registered in the entity registry.""" + + entry = entity_registry.async_get("select.gotham_city_all") + assert entry.unique_id == "robin-r-1234-20240201-123456-aa-bb-cc-dd-ee-ff-47660" diff --git a/tests/components/nam/snapshots/test_diagnostics.ambr b/tests/components/nam/snapshots/test_diagnostics.ambr index c187dec2866..e92e02fa1d8 100644 --- a/tests/components/nam/snapshots/test_diagnostics.ambr +++ b/tests/components/nam/snapshots/test_diagnostics.ambr @@ -2,18 +2,18 @@ # name: test_entry_diagnostics dict({ 'data': dict({ - 'bme280_humidity': 45.7, - 'bme280_pressure': 1011.012, - 'bme280_temperature': 7.6, - 'bmp180_pressure': 1032.012, - 'bmp180_temperature': 7.6, - 'bmp280_pressure': 1022.012, - 'bmp280_temperature': 5.6, - 'dht22_humidity': 46.2, - 'dht22_temperature': 6.3, - 'ds18b20_temperature': 12.6, - 'heca_humidity': 50.0, - 'heca_temperature': 8.0, + 'bme280_humidity': 45.69, + 'bme280_pressure': 1011.0117, + 'bme280_temperature': 7.56, + 'bmp180_pressure': 1032.0118, + 'bmp180_temperature': 7.56, + 'bmp280_pressure': 1022.0117999999999, + 'bmp280_temperature': 5.56, + 'dht22_humidity': 46.23, + 'dht22_temperature': 6.26, + 'ds18b20_temperature': 12.56, + 'heca_humidity': 49.97, + 'heca_temperature': 7.95, 'mhz14a_carbon_dioxide': 865.0, 'pms_caqi': 19, 'pms_caqi_level': 'very_low', @@ -22,17 +22,17 @@ 'pms_p2': 11.0, 'sds011_caqi': 19, 'sds011_caqi_level': 'very_low', - 'sds011_p1': 18.6, - 'sds011_p2': 11.0, - 'sht3x_humidity': 34.7, - 'sht3x_temperature': 6.3, + 'sds011_p1': 18.65, + 'sds011_p2': 11.03, + 'sht3x_humidity': 34.69, + 'sht3x_temperature': 6.28, 'signal': -72.0, 'sps30_caqi': 54, 'sps30_caqi_level': 'medium', - 'sps30_p0': 31.2, - 'sps30_p1': 21.2, - 'sps30_p2': 34.3, - 'sps30_p4': 24.7, + 'sps30_p0': 31.23, + 'sps30_p1': 21.23, + 'sps30_p2': 34.32, + 'sps30_p4': 24.72, 'uptime': 456987, }), 'info': dict({ diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index ea47998f3de..426b2ff2e03 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -97,7 +97,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '45.7', + 'state': '45.69', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_pressure-entry] @@ -151,7 +151,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1011.012', + 'state': '1011.0117', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_temperature-entry] @@ -205,7 +205,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.6', + 'state': '7.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp180_pressure-entry] @@ -259,7 +259,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1032.012', + 'state': '1032.0118', }) # --- # name: 
test_sensor[sensor.nettigo_air_monitor_bmp180_temperature-entry] @@ -313,7 +313,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '7.6', + 'state': '7.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_pressure-entry] @@ -367,7 +367,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1022.012', + 'state': '1022.0118', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_bmp280_temperature-entry] @@ -421,7 +421,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '5.6', + 'state': '5.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_humidity-entry] @@ -475,7 +475,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '46.2', + 'state': '46.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_dht22_temperature-entry] @@ -529,7 +529,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.3', + 'state': '6.26', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_ds18b20_temperature-entry] @@ -583,7 +583,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '12.6', + 'state': '12.56', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_heca_humidity-entry] @@ -637,7 +637,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '50.0', + 'state': '49.97', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_heca_temperature-entry] @@ -691,7 +691,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '8.0', + 'state': '7.95', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_last_restart-entry] @@ -1224,7 +1224,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '18.6', + 'state': '18.65', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sds011_pm2_5-entry] @@ -1278,7 +1278,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '11.0', + 'state': '11.03', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_humidity-entry] @@ -1332,7 +1332,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.7', + 'state': '34.69', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sht3x_temperature-entry] @@ -1386,7 +1386,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '6.3', + 'state': '6.28', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_signal_strength-entry] @@ -1602,7 +1602,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '31.2', + 'state': '31.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm10-entry] @@ -1656,7 +1656,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '21.2', + 'state': '21.23', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm2_5-entry] @@ -1710,7 +1710,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '34.3', + 'state': '34.32', }) # --- # name: test_sensor[sensor.nettigo_air_monitor_sps30_pm4-entry] @@ -1763,6 +1763,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '24.7', + 'state': '24.72', }) # --- diff --git a/tests/components/nam/test_init.py b/tests/components/nam/test_init.py index 8b8c3a4835a..13bde1432b3 100644 --- a/tests/components/nam/test_init.py +++ b/tests/components/nam/test_init.py @@ -23,7 +23,7 @@ async def test_async_setup_entry(hass: HomeAssistant) -> None: state = hass.states.get("sensor.nettigo_air_monitor_sds011_pm2_5") assert state is not None assert state.state != STATE_UNAVAILABLE - assert state.state == "11.0" + assert state.state == 
"11.03" async def test_config_not_ready(hass: HomeAssistant) -> None: diff --git a/tests/components/nam/test_sensor.py b/tests/components/nam/test_sensor.py index 53945e1c8a2..6924af48f01 100644 --- a/tests/components/nam/test_sensor.py +++ b/tests/components/nam/test_sensor.py @@ -77,7 +77,7 @@ async def test_incompleta_data_after_device_restart(hass: HomeAssistant) -> None state = hass.states.get("sensor.nettigo_air_monitor_heca_temperature") assert state - assert state.state == "8.0" + assert state.state == "7.95" assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TEMPERATURE assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS @@ -110,7 +110,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.6" + assert state.state == "7.56" with ( patch("homeassistant.components.nam.NettigoAirMonitor.initialize"), @@ -142,7 +142,7 @@ async def test_availability( state = hass.states.get("sensor.nettigo_air_monitor_bme280_temperature") assert state assert state.state != STATE_UNAVAILABLE - assert state.state == "7.6" + assert state.state == "7.56" async def test_manual_update_entity(hass: HomeAssistant) -> None: diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index bbaa92b7b28..0a553f9c114 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator import copy from dataclasses import dataclass, field import time @@ -14,7 +14,6 @@ from google_nest_sdm.device_manager import DeviceManager from google_nest_sdm.event import EventMessage from google_nest_sdm.event_media import CachePolicy from google_nest_sdm.google_nest_subscriber import GoogleNestSubscriber -from typing_extensions import Generator from homeassistant.components.application_credentials import ClientCredential from homeassistant.components.nest import DOMAIN diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index de0fc2079fa..4b64e80543b 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -3,6 +3,7 @@ from __future__ import annotations from asyncio import AbstractEventLoop +from collections.abc import Generator import copy import shutil import time @@ -15,7 +16,6 @@ from google_nest_sdm import diagnostics from google_nest_sdm.auth import AbstractAuth from google_nest_sdm.device_manager import DeviceManager import pytest -from typing_extensions import Generator from homeassistant.components.application_credentials import ( async_import_client_credential, @@ -109,7 +109,7 @@ async def auth(aiohttp_client: ClientSessionGenerator) -> FakeAuth: @pytest.fixture(autouse=True) -def cleanup_media_storage(hass): +def cleanup_media_storage(hass: HomeAssistant) -> Generator[None]: """Test cleanup, remove any media storage persisted during the test.""" tmp_path = str(uuid.uuid4()) with patch("homeassistant.components.nest.media_source.MEDIA_PATH", new=tmp_path): diff --git a/tests/components/nest/test_api.py b/tests/components/nest/test_api.py index fd07233fa8c..a13d4d3a337 100644 --- a/tests/components/nest/test_api.py +++ b/tests/components/nest/test_api.py @@ -18,7 +18,7 @@ from homeassistant.components.nest.const import API_URL, OAUTH2_TOKEN, SDM_SCOPE from homeassistant.core import 
HomeAssistant from homeassistant.util import dt as dt_util -from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, PlatformSetup +from .common import CLIENT_ID, CLIENT_SECRET, PROJECT_ID, FakeSubscriber, PlatformSetup from .conftest import FAKE_REFRESH_TOKEN, FAKE_TOKEN from tests.test_util.aiohttp import AiohttpClientMocker @@ -27,7 +27,7 @@ FAKE_UPDATED_TOKEN = "fake-updated-token" @pytest.fixture -def subscriber() -> None: +def subscriber() -> FakeSubscriber | None: """Disable default subscriber since tests use their own patch.""" return None diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index 1838c18b6d4..fd2b5ef0388 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -4,6 +4,7 @@ These tests fake out the subscriber/devicemanager, and are not using a real pubsub subscriber. """ +from collections.abc import Generator import datetime from http import HTTPStatus from unittest.mock import AsyncMock, Mock, patch @@ -12,7 +13,6 @@ import aiohttp from freezegun import freeze_time from google_nest_sdm.event import EventMessage import pytest -from typing_extensions import Generator from homeassistant.components import camera from homeassistant.components.camera import STATE_IDLE, STATE_STREAMING, StreamType diff --git a/tests/components/nest/test_device_trigger.py b/tests/components/nest/test_device_trigger.py index 1820096d2a6..f818713d382 100644 --- a/tests/components/nest/test_device_trigger.py +++ b/tests/components/nest/test_device_trigger.py @@ -20,7 +20,7 @@ from homeassistant.util.dt import utcnow from .common import DEVICE_ID, CreateDevice, FakeSubscriber, PlatformSetup -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations DEVICE_NAME = "My Camera" DATA_MESSAGE = {"message": "service-called"} @@ -83,12 +83,6 @@ async def setup_automation(hass, device_id, trigger_type): ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -248,7 +242,7 @@ async def test_fires_on_camera_motion( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_motion triggers firing.""" create_device.create( @@ -273,8 +267,8 @@ async def test_fires_on_camera_motion( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_camera_person( @@ -282,7 +276,7 @@ async def test_fires_on_camera_person( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_person triggers firing.""" create_device.create( @@ -307,8 +301,8 @@ async def test_fires_on_camera_person( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_camera_sound( @@ -316,7 +310,7 @@ async def test_fires_on_camera_sound( device_registry: dr.DeviceRegistry, 
create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test camera_sound triggers firing.""" create_device.create( @@ -341,8 +335,8 @@ async def test_fires_on_camera_sound( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_fires_on_doorbell_chime( @@ -350,7 +344,7 @@ async def test_fires_on_doorbell_chime( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test doorbell_chime triggers firing.""" create_device.create( @@ -375,8 +369,8 @@ async def test_fires_on_doorbell_chime( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE async def test_trigger_for_wrong_device_id( @@ -384,7 +378,7 @@ async def test_trigger_for_wrong_device_id( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test messages for the wrong device are ignored.""" create_device.create( @@ -409,7 +403,7 @@ async def test_trigger_for_wrong_device_id( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_trigger_for_wrong_event_type( @@ -417,7 +411,7 @@ async def test_trigger_for_wrong_event_type( device_registry: dr.DeviceRegistry, create_device: CreateDevice, setup_platform: PlatformSetup, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test that messages for the wrong event type are ignored.""" create_device.create( @@ -442,13 +436,13 @@ async def test_trigger_for_wrong_event_type( } hass.bus.async_fire(NEST_EVENT, message) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_subscriber_automation( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], create_device: CreateDevice, setup_platform: PlatformSetup, subscriber: FakeSubscriber, @@ -488,5 +482,5 @@ async def test_subscriber_automation( await subscriber.async_receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data == DATA_MESSAGE + assert len(service_calls) == 1 + assert service_calls[0].data == DATA_MESSAGE diff --git a/tests/components/nest/test_init.py b/tests/components/nest/test_init.py index f9813ca63ee..f3226c936fb 100644 --- a/tests/components/nest/test_init.py +++ b/tests/components/nest/test_init.py @@ -8,6 +8,7 @@ mode (e.g. yaml, ConfigEntry, etc) however some tests override and just run in relevant modes. 
""" +from collections.abc import Generator import logging from typing import Any from unittest.mock import patch @@ -19,7 +20,6 @@ from google_nest_sdm.exceptions import ( SubscriberException, ) import pytest -from typing_extensions import Generator from homeassistant.components.nest import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -67,13 +67,15 @@ def warning_caplog( @pytest.fixture -def subscriber_side_effect() -> None: +def subscriber_side_effect() -> Any | None: """Fixture to inject failures into FakeSubscriber start.""" return None @pytest.fixture -def failing_subscriber(subscriber_side_effect: Any) -> YieldFixture[FakeSubscriber]: +def failing_subscriber( + subscriber_side_effect: Any | None, +) -> YieldFixture[FakeSubscriber]: """Fixture overriding default subscriber behavior to allow failure injection.""" subscriber = FakeSubscriber() with patch( diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index f4fb8bdb623..3cfa4ee6687 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -4,6 +4,7 @@ These tests simulate recent camera events received by the subscriber exposed as media in the media source. """ +from collections.abc import Generator import datetime from http import HTTPStatus import io @@ -15,7 +16,6 @@ import av from google_nest_sdm.event import EventMessage import numpy as np import pytest -from typing_extensions import Generator from homeassistant.components.media_player.errors import BrowseError from homeassistant.components.media_source import ( diff --git a/tests/components/netatmo/common.py b/tests/components/netatmo/common.py index 08c8679acf3..730cb0cb117 100644 --- a/tests/components/netatmo/common.py +++ b/tests/components/netatmo/common.py @@ -1,9 +1,10 @@ """Common methods used across tests for Netatmo.""" +from collections.abc import Iterator from contextlib import contextmanager import json from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import patch from syrupy import SnapshotAssertion @@ -86,7 +87,7 @@ async def fake_post_request(*args: Any, **kwargs: Any): ) -async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str: +async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str | None: """Return fake data.""" if "endpoint" not in kwargs: return "{}" @@ -95,6 +96,7 @@ async def fake_get_image(*args: Any, **kwargs: Any) -> bytes | str: if endpoint in "snapshot_720.jpg": return b"test stream image bytes" + return None async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> None: @@ -109,7 +111,7 @@ async def simulate_webhook(hass: HomeAssistant, webhook_id: str, response) -> No @contextmanager -def selected_platforms(platforms: list[Platform]) -> AsyncMock: +def selected_platforms(platforms: list[Platform]) -> Iterator[None]: """Restrict loaded platforms to list given.""" with ( patch("homeassistant.components.netatmo.data_handler.PLATFORMS", platforms), diff --git a/tests/components/netatmo/conftest.py b/tests/components/netatmo/conftest.py index d2e6c1fdc88..b79e6480711 100644 --- a/tests/components/netatmo/conftest.py +++ b/tests/components/netatmo/conftest.py @@ -69,6 +69,15 @@ def mock_config_entry_fixture(hass: HomeAssistant) -> MockConfigEntry: "area_name": "Home max", "mode": "max", }, + "Home min": { + "lat_ne": 32.2345678, + "lon_ne": -117.1234567, + "lat_sw": 32.1234567, + "lon_sw": -117.2345678, + "show_on_map": True, + "area_name": "Home min", + 
"mode": "min", + }, } }, ) diff --git a/tests/components/netatmo/snapshots/test_diagnostics.ambr b/tests/components/netatmo/snapshots/test_diagnostics.ambr index 8ce00279b83..35cd0bfbf47 100644 --- a/tests/components/netatmo/snapshots/test_diagnostics.ambr +++ b/tests/components/netatmo/snapshots/test_diagnostics.ambr @@ -630,6 +630,15 @@ 'mode': 'max', 'show_on_map': True, }), + 'Home min': dict({ + 'area_name': 'Home min', + 'lat_ne': '**REDACTED**', + 'lat_sw': '**REDACTED**', + 'lon_ne': '**REDACTED**', + 'lon_sw': '**REDACTED**', + 'mode': 'min', + 'show_on_map': True, + }), }), }), 'pref_disable_new_entities': False, diff --git a/tests/components/netatmo/snapshots/test_init.ambr b/tests/components/netatmo/snapshots/test_init.ambr index 38a54f507a0..60cb22d74f2 100644 --- a/tests/components/netatmo/snapshots/test_init.ambr +++ b/tests/components/netatmo/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Roller Shutter', + 'model_id': None, 'name': 'Entrance Blinds', 'name_by_user': None, 'primary_config_entry': , @@ -52,6 +53,7 @@ }), 'manufacturer': 'Bubbendorf', 'model': 'Orientable Shutter', + 'model_id': None, 'name': 'Bubendorff blind', 'name_by_user': None, 'primary_config_entry': , @@ -83,6 +85,7 @@ }), 'manufacturer': 'Legrand', 'model': '2 wire light switch/dimmer', + 'model_id': None, 'name': 'Unknown 00:11:22:33:00:11:45:fe', 'name_by_user': None, 'primary_config_entry': , @@ -114,6 +117,7 @@ }), 'manufacturer': 'Smarther', 'model': 'Smarther with Netatmo', + 'model_id': None, 'name': 'Corridor', 'name_by_user': None, 'primary_config_entry': , @@ -145,6 +149,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Energy Meter', + 'model_id': None, 'name': 'Consumption meter', 'name_by_user': None, 'primary_config_entry': , @@ -176,6 +181,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Light switch/dimmer with neutral', + 'model_id': None, 'name': 'Bathroom light', 'name_by_user': None, 'primary_config_entry': , @@ -207,6 +213,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 1', 'name_by_user': None, 'primary_config_entry': , @@ -238,6 +245,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 2', 'name_by_user': None, 'primary_config_entry': , @@ -269,6 +277,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 3', 'name_by_user': None, 'primary_config_entry': , @@ -300,6 +309,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 4', 'name_by_user': None, 'primary_config_entry': , @@ -331,6 +341,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Line 5', 'name_by_user': None, 'primary_config_entry': , @@ -362,6 +373,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Total', 'name_by_user': None, 'primary_config_entry': , @@ -393,6 +405,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Gas', 'name_by_user': None, 'primary_config_entry': , @@ -424,6 +437,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Hot water', 'name_by_user': None, 'primary_config_entry': , @@ -455,6 +469,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Cold water', 'name_by_user': None, 'primary_config_entry': , @@ -486,6 +501,7 @@ }), 'manufacturer': 
'Legrand', 'model': 'Connected Ecometer', + 'model_id': None, 'name': 'Écocompteur', 'name_by_user': None, 'primary_config_entry': , @@ -517,6 +533,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Camera', + 'model_id': None, 'name': 'Hall', 'name_by_user': None, 'primary_config_entry': , @@ -548,6 +565,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Anemometer', + 'model_id': None, 'name': 'Villa Garden', 'name_by_user': None, 'primary_config_entry': , @@ -579,6 +597,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Camera', + 'model_id': None, 'name': 'Front', 'name_by_user': None, 'primary_config_entry': , @@ -610,6 +629,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Video Doorbell', + 'model_id': None, 'name': 'Netatmo-Doorbell', 'name_by_user': None, 'primary_config_entry': , @@ -641,6 +661,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Kitchen', 'name_by_user': None, 'primary_config_entry': , @@ -672,6 +693,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, 'primary_config_entry': , @@ -703,6 +725,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Baby Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -734,6 +757,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -765,6 +789,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Air Quality Monitor', + 'model_id': None, 'name': 'Parents Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -796,6 +821,7 @@ }), 'manufacturer': 'Legrand', 'model': 'Plug', + 'model_id': None, 'name': 'Prise', 'name_by_user': None, 'primary_config_entry': , @@ -827,6 +853,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Outdoor Module', + 'model_id': None, 'name': 'Villa Outdoor', 'name_by_user': None, 'primary_config_entry': , @@ -858,6 +885,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', + 'model_id': None, 'name': 'Villa Bedroom', 'name_by_user': None, 'primary_config_entry': , @@ -889,6 +917,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Indoor Module', + 'model_id': None, 'name': 'Villa Bathroom', 'name_by_user': None, 'primary_config_entry': , @@ -920,6 +949,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Home Weather station', + 'model_id': None, 'name': 'Villa', 'name_by_user': None, 'primary_config_entry': , @@ -951,6 +981,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Rain Gauge', + 'model_id': None, 'name': 'Villa Rain', 'name_by_user': None, 'primary_config_entry': , @@ -982,6 +1013,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'OpenTherm Modulating Thermostat', + 'model_id': None, 'name': 'Bureau Modulate', 'name_by_user': None, 'primary_config_entry': , @@ -1013,6 +1045,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Thermostat', + 'model_id': None, 'name': 'Livingroom', 'name_by_user': None, 'primary_config_entry': , @@ -1044,6 +1077,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', + 'model_id': None, 'name': 'Valve1', 'name_by_user': None, 'primary_config_entry': , @@ -1075,6 +1109,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Smart Valve', + 'model_id': None, 'name': 'Valve2', 'name_by_user': None, 'primary_config_entry': , @@ -1106,6 +1141,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Climate', + 
'model_id': None, 'name': 'MYHOME', 'name_by_user': None, 'primary_config_entry': , @@ -1137,6 +1173,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', + 'model_id': None, 'name': 'Home avg', 'name_by_user': None, 'primary_config_entry': , @@ -1168,6 +1205,7 @@ }), 'manufacturer': 'Netatmo', 'model': 'Public Weather station', + 'model_id': None, 'name': 'Home max', 'name_by_user': None, 'primary_config_entry': , @@ -1177,3 +1215,35 @@ 'via_device_id': None, }) # --- +# name: test_devices[netatmo-Home min] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'https://weathermap.netatmo.com/', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'netatmo', + 'Home min', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Netatmo', + 'model': 'Public Weather station', + 'model_id': None, + 'name': 'Home min', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index 6ab1e4b1e1a..bc2a18d918d 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -2663,6 +2663,556 @@ 'state': '15', }) # --- +# name: test_entity[sensor.home_min_atmospheric_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_atmospheric_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Atmospheric pressure', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_atmospheric_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'Home min Atmospheric pressure', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_atmospheric_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1005.4', + }) +# --- +# name: test_entity[sensor.home_min_gust_angle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_gust_angle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Gust angle', + 'platform': 'netatmo', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gust_angle', + 'unique_id': 'Home-min-gustangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_gust_angle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'friendly_name': 'Home min Gust angle', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_gust_angle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '217', + }) +# --- +# name: test_entity[sensor.home_min_gust_strength-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_gust_strength', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Gust strength', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gust_strength', + 'unique_id': 'Home-min-guststrength', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_gust_strength-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'wind_speed', + 'friendly_name': 'Home min Gust strength', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_gust_strength', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '31', + }) +# --- +# name: test_entity[sensor.home_min_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-humidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entity[sensor.home_min_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'humidity', + 'friendly_name': 'Home min Humidity', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.home_min_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- +# name: test_entity[sensor.home_min_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_none', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windangle_value', + 'unit_of_measurement': '°', + }) +# --- +# name: test_entity[sensor.home_min_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'friendly_name': 'Home min None', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'sensor.home_min_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- +# name: test_entity[sensor.home_min_precipitation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-rain', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity[sensor.home_min_precipitation_last_hour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation_last_hour', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation last hour', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_rain_1', + 'unique_id': 'Home-min-sum_rain_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation_last_hour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation last hour', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation_last_hour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_entity[sensor.home_min_precipitation_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_precipitation_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Precipitation today', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sum_rain_24', + 'unique_id': 'Home-min-sum_rain_24', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_precipitation_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'precipitation', + 'friendly_name': 'Home min Precipitation today', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_precipitation_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.999', + }) +# --- +# name: test_entity[sensor.home_min_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by Netatmo', + 'device_class': 'temperature', + 'friendly_name': 'Home min Temperature', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '19.8', + }) +# --- +# name: test_entity[sensor.home_min_wind_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_min_wind_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind speed', + 'platform': 'netatmo', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'Home-min-windstrength', + 'unit_of_measurement': , + }) +# --- +# name: test_entity[sensor.home_min_wind_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data 
provided by Netatmo', + 'device_class': 'wind_speed', + 'friendly_name': 'Home min Wind speed', + 'latitude': 32.17901225, + 'longitude': -117.17901225, + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_min_wind_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '15', + }) +# --- # name: test_entity[sensor.hot_water_none-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/netatmo/test_device_trigger.py b/tests/components/netatmo/test_device_trigger.py index ad1e9bd8cb9..99709572024 100644 --- a/tests/components/netatmo/test_device_trigger.py +++ b/tests/components/netatmo/test_device_trigger.py @@ -22,16 +22,9 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, - async_mock_service, ) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( ("platform", "device_type", "event_types"), [ @@ -113,7 +106,7 @@ async def test_get_triggers( ) async def test_if_fires_on_event( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -175,8 +168,8 @@ async def test_if_fires_on_event( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -196,7 +189,7 @@ async def test_if_fires_on_event( ) async def test_if_fires_on_event_legacy( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -258,8 +251,8 @@ async def test_if_fires_on_event_legacy( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - device - {device.id}" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == f"{event_type} - device - {device.id}" @pytest.mark.parametrize( @@ -275,7 +268,7 @@ async def test_if_fires_on_event_legacy( ) async def test_if_fires_on_event_with_subtype( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, platform, @@ -343,8 +336,11 @@ async def test_if_fires_on_event_with_subtype( ) await hass.async_block_till_done() assert len(events) == 1 - assert len(calls) == 1 - assert calls[0].data["some"] == f"{event_type} - {sub_type} - device - {device.id}" + assert len(service_calls) == 1 + assert ( + service_calls[0].data["some"] + == f"{event_type} - {sub_type} - device - {device.id}" + ) @pytest.mark.parametrize( diff --git a/tests/components/netatmo/test_diagnostics.py b/tests/components/netatmo/test_diagnostics.py index 48f021295e1..7a0bf11c652 100644 --- a/tests/components/netatmo/test_diagnostics.py +++ b/tests/components/netatmo/test_diagnostics.py @@ -42,4 +42,11 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=paths("info.data.token.expires_at", "info.entry_id")) + ) == snapshot( + exclude=paths( + "info.data.token.expires_at", + "info.entry_id", + 
"info.created_at", + "info.modified_at", + ) + ) diff --git a/tests/components/netatmo/test_sensor.py b/tests/components/netatmo/test_sensor.py index 3c16e6e60f9..2c47cdefa60 100644 --- a/tests/components/netatmo/test_sensor.py +++ b/tests/components/netatmo/test_sensor.py @@ -81,6 +81,12 @@ async def test_public_weather_sensor( assert hass.states.get(f"{prefix}humidity").state == "76" assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1014.4" + prefix = "sensor.home_min_" + + assert hass.states.get(f"{prefix}temperature").state == "19.8" + assert hass.states.get(f"{prefix}humidity").state == "56" + assert hass.states.get(f"{prefix}atmospheric_pressure").state == "1005.4" + prefix = "sensor.home_avg_" assert hass.states.get(f"{prefix}temperature").state == "22.7" diff --git a/tests/components/netgear_lte/snapshots/test_init.ambr b/tests/components/netgear_lte/snapshots/test_init.ambr index e893d36a06e..ca65c17cc8e 100644 --- a/tests/components/netgear_lte/snapshots/test_init.ambr +++ b/tests/components/netgear_lte/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Netgear', 'model': 'LM1200', + 'model_id': None, 'name': 'Netgear LM1200', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/network/conftest.py b/tests/components/network/conftest.py index 36d9c449d27..d5fbb95a814 100644 --- a/tests/components/network/conftest.py +++ b/tests/components/network/conftest.py @@ -1,9 +1,9 @@ """Tests for the Network Configuration integration.""" +from collections.abc import Generator from unittest.mock import _patch import pytest -from typing_extensions import Generator @pytest.fixture(autouse=True) diff --git a/tests/components/nextbus/conftest.py b/tests/components/nextbus/conftest.py index 84445905c2e..231faccf907 100644 --- a/tests/components/nextbus/conftest.py +++ b/tests/components/nextbus/conftest.py @@ -8,15 +8,32 @@ import pytest @pytest.fixture( params=[ - {"name": "Outbound", "stop": [{"tag": "5650"}]}, [ { "name": "Outbound", - "stop": [{"tag": "5650"}], + "shortName": "Outbound", + "useForUi": True, + "stops": ["5184"], + }, + { + "name": "Outbound - Hidden", + "shortName": "Outbound - Hidden", + "useForUi": False, + "stops": ["5651"], + }, + ], + [ + { + "name": "Outbound", + "shortName": "Outbound", + "useForUi": True, + "stops": ["5184"], }, { "name": "Inbound", - "stop": [{"tag": "5651"}], + "shortName": "Inbound", + "useForUi": True, + "stops": ["5651"], }, ], ] @@ -35,22 +52,65 @@ def mock_nextbus_lists( ) -> MagicMock: """Mock all list functions in nextbus to test validate logic.""" instance = mock_nextbus.return_value - instance.get_agency_list.return_value = { - "agency": [{"tag": "sf-muni", "title": "San Francisco Muni"}] - } - instance.get_route_list.return_value = { - "route": [{"tag": "F", "title": "F - Market & Wharves"}] - } - instance.get_route_config.return_value = { - "route": { - "stop": [ - {"tag": "5650", "title": "Market St & 7th St"}, - {"tag": "5651", "title": "Market St & 7th St"}, - # Error case test. 
Duplicate title with no unique direction - {"tag": "5652", "title": "Market St & 7th St"}, - ], - "direction": route_config_direction, + instance.agencies.return_value = [ + { + "id": "sfmta-cis", + "name": "San Francisco Muni CIS", + "shortName": "SF Muni CIS", + "region": "", + "website": "", + "logo": "", + "nxbs2RedirectUrl": "", } + ] + + instance.routes.return_value = [ + { + "id": "F", + "rev": 1057, + "title": "F Market & Wharves", + "description": "7am-10pm daily", + "color": "", + "textColor": "", + "hidden": False, + "timestamp": "2024-06-23T03:06:58Z", + }, + ] + + instance.route_details.return_value = { + "id": "F", + "rev": 1057, + "title": "F Market & Wharves", + "description": "7am-10pm daily", + "color": "", + "textColor": "", + "hidden": False, + "boundingBox": {}, + "stops": [ + { + "id": "5184", + "lat": 37.8071299, + "lon": -122.41732, + "name": "Jones St & Beach St", + "code": "15184", + "hidden": False, + "showDestinationSelector": True, + "directions": ["F_0_var1", "F_0_var0"], + }, + { + "id": "5651", + "lat": 37.8071299, + "lon": -122.41732, + "name": "Jones St & Beach St", + "code": "15651", + "hidden": False, + "showDestinationSelector": True, + "directions": ["F_0_var1", "F_0_var0"], + }, + ], + "directions": route_config_direction, + "paths": [], + "timestamp": "2024-06-23T03:06:58Z", } return instance diff --git a/tests/components/nextbus/test_config_flow.py b/tests/components/nextbus/test_config_flow.py index 0a64bc97d9a..4e5b933a189 100644 --- a/tests/components/nextbus/test_config_flow.py +++ b/tests/components/nextbus/test_config_flow.py @@ -1,9 +1,9 @@ """Test the NextBus config flow.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, setup from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN @@ -44,7 +44,7 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_AGENCY: "sf-muni", + CONF_AGENCY: "sfmta-cis", }, ) await hass.async_block_till_done() @@ -68,16 +68,16 @@ async def test_user_config( result = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_STOP: "5650", + CONF_STOP: "5184", }, ) await hass.async_block_till_done() assert result.get("type") is FlowResultType.CREATE_ENTRY assert result.get("data") == { - "agency": "sf-muni", + "agency": "sfmta-cis", "route": "F", - "stop": "5650", + "stop": "5184", } assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/nextbus/test_sensor.py b/tests/components/nextbus/test_sensor.py index 3630ff88855..dd0346c3e7a 100644 --- a/tests/components/nextbus/test_sensor.py +++ b/tests/components/nextbus/test_sensor.py @@ -1,12 +1,12 @@ """The tests for the nexbus sensor component.""" +from collections.abc import Generator from copy import deepcopy from unittest.mock import MagicMock, patch from urllib.error import HTTPError from py_nextbus.client import NextBusFormatError, NextBusHTTPError import pytest -from typing_extensions import Generator from homeassistant.components import sensor from homeassistant.components.nextbus.const import CONF_AGENCY, CONF_ROUTE, DOMAIN @@ -18,9 +18,9 @@ from homeassistant.helpers.update_coordinator import UpdateFailed from tests.common import MockConfigEntry -VALID_AGENCY = "sf-muni" +VALID_AGENCY = "sfmta-cis" VALID_ROUTE = "F" -VALID_STOP = "5650" +VALID_STOP = "5184" VALID_AGENCY_TITLE = "San Francisco Muni" 
VALID_ROUTE_TITLE = "F-Market & Wharves" VALID_STOP_TITLE = "Market St & 7th St" @@ -44,25 +44,38 @@ CONFIG_BASIC = { } } -BASIC_RESULTS = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - {"minutes": "10", "epochTime": "1553807380000"}, - ], +BASIC_RESULTS = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [ + {"minutes": 1, "timestamp": 1553807371000}, + {"minutes": 2, "timestamp": 1553807372000}, + {"minutes": 3, "timestamp": 1553807373000}, + {"minutes": 10, "timestamp": 1553807380000}, + ], } -} +] + +NO_UPCOMING = [ + { + "route": { + "title": VALID_ROUTE_TITLE, + "id": VALID_ROUTE, + }, + "stop": { + "name": VALID_STOP_TITLE, + "id": VALID_STOP, + }, + "values": [], + } +] @pytest.fixture @@ -78,9 +91,9 @@ def mock_nextbus_predictions( ) -> Generator[MagicMock]: """Create a mock of NextBusClient predictions.""" instance = mock_nextbus.return_value - instance.get_predictions_for_multi_stops.return_value = BASIC_RESULTS + instance.predictions_for_stop.return_value = BASIC_RESULTS - return instance.get_predictions_for_multi_stops + return instance.predictions_for_stop async def assert_setup_sensor( @@ -105,117 +118,23 @@ async def assert_setup_sensor( return config_entry -async def test_message_dict( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, -) -> None: - """Verify that a single dict message is rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": {"text": "Message"}, - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - } - } - - await assert_setup_sensor(hass, CONFIG_BASIC) - - state = hass.states.get(SENSOR_ID) - assert state is not None - assert state.attributes["message"] == "Message" - - -async def test_message_list( +async def test_predictions( hass: HomeAssistant, mock_nextbus: MagicMock, mock_nextbus_lists: MagicMock, mock_nextbus_predictions: MagicMock, ) -> None: """Verify that a list of messages are rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": [{"text": "Message 1"}, {"text": "Message 2"}], - "direction": { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - } - } - - await assert_setup_sensor(hass, CONFIG_BASIC) - - state = hass.states.get(SENSOR_ID) - assert state is not None - assert state.attributes["message"] == "Message 1 -- Message 2" - - -async def 
test_direction_list( - hass: HomeAssistant, - mock_nextbus: MagicMock, - mock_nextbus_lists: MagicMock, - mock_nextbus_predictions: MagicMock, -) -> None: - """Verify that a list of messages are rendered correctly.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "message": [{"text": "Message 1"}, {"text": "Message 2"}], - "direction": [ - { - "title": "Outbound", - "prediction": [ - {"minutes": "1", "epochTime": "1553807371000"}, - {"minutes": "2", "epochTime": "1553807372000"}, - {"minutes": "3", "epochTime": "1553807373000"}, - ], - }, - { - "title": "Outbound 2", - "prediction": {"minutes": "0", "epochTime": "1553807374000"}, - }, - ], - } - } await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None assert state.state == "2019-03-28T21:09:31+00:00" - assert state.attributes["agency"] == VALID_AGENCY_TITLE + assert state.attributes["agency"] == VALID_AGENCY assert state.attributes["route"] == VALID_ROUTE_TITLE assert state.attributes["stop"] == VALID_STOP_TITLE - assert state.attributes["direction"] == "Outbound, Outbound 2" - assert state.attributes["upcoming"] == "0, 1, 2, 3" + assert state.attributes["upcoming"] == "1, 2, 3, 10" @pytest.mark.parametrize( @@ -256,27 +175,19 @@ async def test_custom_name( assert state.name == "Custom Name" -@pytest.mark.parametrize( - "prediction_results", - [ - {}, - {"Error": "Failed"}, - ], -) -async def test_no_predictions( +async def test_verify_no_predictions( hass: HomeAssistant, mock_nextbus: MagicMock, - mock_nextbus_predictions: MagicMock, mock_nextbus_lists: MagicMock, - prediction_results: dict[str, str], + mock_nextbus_predictions: MagicMock, ) -> None: - """Verify there are no exceptions when no predictions are returned.""" - mock_nextbus_predictions.return_value = prediction_results - + """Verify attributes are set despite no upcoming times.""" + mock_nextbus_predictions.return_value = [] await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None + assert "upcoming" not in state.attributes assert state.state == "unknown" @@ -287,21 +198,10 @@ async def test_verify_no_upcoming( mock_nextbus_predictions: MagicMock, ) -> None: """Verify attributes are set despite no upcoming times.""" - mock_nextbus_predictions.return_value = { - "predictions": { - "agencyTitle": VALID_AGENCY_TITLE, - "agencyTag": VALID_AGENCY, - "routeTitle": VALID_ROUTE_TITLE, - "routeTag": VALID_ROUTE, - "stopTitle": VALID_STOP_TITLE, - "stopTag": VALID_STOP, - "direction": {"title": "Outbound", "prediction": []}, - } - } - + mock_nextbus_predictions.return_value = NO_UPCOMING await assert_setup_sensor(hass, CONFIG_BASIC) state = hass.states.get(SENSOR_ID) assert state is not None - assert state.state == "unknown" assert state.attributes["upcoming"] == "No upcoming predictions" + assert state.state == "unknown" diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py index d6cd39e7fc8..cf3eda55fe1 100644 --- a/tests/components/nextcloud/conftest.py +++ b/tests/components/nextcloud/conftest.py @@ -1,9 +1,9 @@ """Fixtrues for the Nextcloud integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git 
a/tests/components/nextdns/test_diagnostics.py b/tests/components/nextdns/test_diagnostics.py index 7652bc4f03e..3bb1fc3ee67 100644 --- a/tests/components/nextdns/test_diagnostics.py +++ b/tests/components/nextdns/test_diagnostics.py @@ -1,6 +1,7 @@ """Test NextDNS diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -18,4 +19,6 @@ async def test_entry_diagnostics( """Test config entry diagnostics.""" entry = await init_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/nibe_heatpump/conftest.py b/tests/components/nibe_heatpump/conftest.py index c44875414e2..47b65772a24 100644 --- a/tests/components/nibe_heatpump/conftest.py +++ b/tests/components/nibe_heatpump/conftest.py @@ -1,12 +1,12 @@ """Test configuration for Nibe Heat Pump.""" +from collections.abc import Generator from contextlib import ExitStack from unittest.mock import AsyncMock, Mock, patch from freezegun.api import FrozenDateTimeFactory from nibe.exceptions import CoilNotFoundException import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/nibe_heatpump/test_config_flow.py b/tests/components/nibe_heatpump/test_config_flow.py index 471f7f4c593..de5f577fa7d 100644 --- a/tests/components/nibe_heatpump/test_config_flow.py +++ b/tests/components/nibe_heatpump/test_config_flow.py @@ -38,7 +38,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def _get_connection_form( hass: HomeAssistant, connection_type: str -) -> FlowResultType: +) -> config_entries.ConfigFlowResult: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} diff --git a/tests/components/notify/conftest.py b/tests/components/notify/conftest.py index 0efb3a4689d..91dc92a27fe 100644 --- a/tests/components/notify/conftest.py +++ b/tests/components/notify/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Notify platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index d6478c358bf..b499486b312 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -226,7 +226,11 @@ async def test_invalid_service( ) -> None: """Test service setup with an invalid service object or platform.""" - def get_service(hass, config, discovery_info=None): + def get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> notify.BaseNotificationService | None: """Return None for an invalid notify service.""" return None diff --git a/tests/components/notion/conftest.py b/tests/components/notion/conftest.py index 17bea306ad8..6a6e150c960 100644 --- a/tests/components/notion/conftest.py +++ b/tests/components/notion/conftest.py @@ -1,5 +1,6 @@ """Define fixtures for Notion tests.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, Mock, patch @@ -8,7 +9,6 @@ from aionotion.listener.models import Listener from aionotion.sensor.models import Sensor from 
aionotion.user.models import UserPreferences import pytest -from typing_extensions import Generator from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN from homeassistant.const import CONF_USERNAME diff --git a/tests/components/notion/test_diagnostics.py b/tests/components/notion/test_diagnostics.py index 023b9369f03..4d87b6292e4 100644 --- a/tests/components/notion/test_diagnostics.py +++ b/tests/components/notion/test_diagnostics.py @@ -4,6 +4,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.components.notion import DOMAIN from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -33,6 +34,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "bridges": [ diff --git a/tests/components/numato/conftest.py b/tests/components/numato/conftest.py index c6fd13a099e..f3ae4d5f32b 100644 --- a/tests/components/numato/conftest.py +++ b/tests/components/numato/conftest.py @@ -1,17 +1,18 @@ """Fixtures for numato tests.""" from copy import deepcopy +from typing import Any import pytest from homeassistant.components import numato -from . import numato_mock from .common import NUMATO_CFG +from .numato_mock import NumatoModuleMock @pytest.fixture -def config(): +def config() -> dict[str, Any]: """Provide a copy of the numato domain's test configuration. This helps to quickly change certain aspects of the configuration scoped @@ -21,8 +22,8 @@ def config(): @pytest.fixture -def numato_fixture(monkeypatch): +def numato_fixture(monkeypatch: pytest.MonkeyPatch) -> NumatoModuleMock: """Inject the numato mockup into numato homeassistant module.""" - module_mock = numato_mock.NumatoModuleMock() + module_mock = NumatoModuleMock() monkeypatch.setattr(numato, "gpio", module_mock) return module_mock diff --git a/tests/components/numato/test_binary_sensor.py b/tests/components/numato/test_binary_sensor.py index 524589af198..08506349247 100644 --- a/tests/components/numato/test_binary_sensor.py +++ b/tests/components/numato/test_binary_sensor.py @@ -21,7 +21,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) diff --git a/tests/components/numato/test_init.py b/tests/components/numato/test_init.py index 35dd102ec9e..4695265f37f 100644 --- a/tests/components/numato/test_init.py +++ b/tests/components/numato/test_init.py @@ -11,7 +11,7 @@ from .common import NUMATO_CFG, mockup_raise, mockup_return async def test_setup_no_devices( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test handling of an 'empty' discovery. @@ -24,7 +24,7 @@ async def test_setup_no_devices( async def test_fail_setup_raising_discovery( - hass: HomeAssistant, numato_fixture, caplog: pytest.LogCaptureFixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test handling of an exception during discovery. 
@@ -57,7 +57,7 @@ async def test_hass_numato_api_wrong_port_directions( async def test_hass_numato_api_errors( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test whether Home Assistant numato API (re-)raises errors.""" numato_fixture.discover() diff --git a/tests/components/numato/test_sensor.py b/tests/components/numato/test_sensor.py index 30a9f174941..c652df9b086 100644 --- a/tests/components/numato/test_sensor.py +++ b/tests/components/numato/test_sensor.py @@ -1,5 +1,7 @@ """Tests for the numato sensor platform.""" +import pytest + from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery @@ -13,7 +15,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -24,7 +26,7 @@ async def test_failing_setups_no_entities( async def test_failing_sensor_update( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test condition when a sensor update fails.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "adc_read", mockup_raise) diff --git a/tests/components/numato/test_switch.py b/tests/components/numato/test_switch.py index e69b3481b1d..42102ea4869 100644 --- a/tests/components/numato/test_switch.py +++ b/tests/components/numato/test_switch.py @@ -1,5 +1,7 @@ """Tests for the numato switch platform.""" +import pytest + from homeassistant.components import switch from homeassistant.const import ( ATTR_ENTITY_ID, @@ -20,7 +22,7 @@ MOCKUP_ENTITY_IDS = { async def test_failing_setups_no_entities( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """When port setup fails, no entity shall be created.""" monkeypatch.setattr(numato_fixture.NumatoDeviceMock, "setup", mockup_raise) @@ -69,7 +71,7 @@ async def test_regular_hass_operations(hass: HomeAssistant, numato_fixture) -> N async def test_failing_hass_operations( - hass: HomeAssistant, numato_fixture, monkeypatch + hass: HomeAssistant, numato_fixture, monkeypatch: pytest.MonkeyPatch ) -> None: """Test failing operations called from within Home Assistant. 
diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 6f74a3126c0..55dad2506f1 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -1,10 +1,10 @@ """The tests for the Number component.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.components.number import ( ATTR_MAX, @@ -646,7 +646,7 @@ async def test_restore_number_restore_state( assert entity0.native_min_value == native_min_value assert entity0.native_step == native_step assert entity0.native_value == native_value - assert type(entity0.native_value) == native_value_type + assert type(entity0.native_value) is native_value_type assert entity0.native_unit_of_measurement == uom diff --git a/tests/components/nws/const.py b/tests/components/nws/const.py index e5fc9df909f..39e954af15a 100644 --- a/tests/components/nws/const.py +++ b/tests/components/nws/const.py @@ -66,6 +66,7 @@ CLEAR_NIGHT_OBSERVATION = DEFAULT_OBSERVATION.copy() CLEAR_NIGHT_OBSERVATION["iconTime"] = "night" SENSOR_EXPECTED_OBSERVATION_METRIC = { + "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": "5", "temperature": "10", "windChill": "5", @@ -80,6 +81,7 @@ SENSOR_EXPECTED_OBSERVATION_METRIC = { } SENSOR_EXPECTED_OBSERVATION_IMPERIAL = { + "timestamp": "2019-08-12T23:53:00+00:00", "dewpoint": str( round( TemperatureConverter.convert( @@ -185,6 +187,7 @@ DEFAULT_FORECAST = [ "temperature": 10, "windSpeedAvg": 10, "windBearing": 180, + "shortForecast": "A short forecast.", "detailedForecast": "A detailed forecast.", "timestamp": "2019-08-12T23:53:00+00:00", "iconTime": "night", diff --git a/tests/components/nws/snapshots/test_diagnostics.ambr b/tests/components/nws/snapshots/test_diagnostics.ambr index 2db73f90054..f8bd82a35c4 100644 --- a/tests/components/nws/snapshots/test_diagnostics.ambr +++ b/tests/components/nws/snapshots/test_diagnostics.ambr @@ -21,6 +21,7 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 75, + 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', @@ -48,6 +49,7 @@ 'number': 1, 'probabilityOfPrecipitation': 89, 'relativeHumidity': 75, + 'shortForecast': 'A short forecast.', 'startTime': '2019-08-12T20:00:00-04:00', 'temperature': 10, 'timestamp': '2019-08-12T23:53:00+00:00', diff --git a/tests/components/nws/snapshots/test_weather.ambr b/tests/components/nws/snapshots/test_weather.ambr index f4669f47615..1df1c2fa644 100644 --- a/tests/components/nws/snapshots/test_weather.ambr +++ b/tests/components/nws/snapshots/test_weather.ambr @@ -1,95 +1,44 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] +# name: test_detailed_forecast_service[hourly] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'is_daytime': False, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + dict({ + 'datetime': '2019-08-12T20:00:00-04:00', + 'short_description': 'A short forecast.', + }), + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].1 +# name: test_detailed_forecast_service[twice_daily] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', 
- 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + dict({ + 'datetime': '2019-08-12T20:00:00-04:00', + 'detailed_description': 'A detailed forecast.', + 'is_daytime': False, + 'short_description': 'A short forecast.', + }), + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].2 +# name: test_detailed_forecast_service_no_data[hourly] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'is_daytime': False, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), + 'weather.abc': dict({ + 'forecast': list([ + ]), + }), }) # --- -# name: test_forecast_service[get_forecast].3 +# name: test_detailed_forecast_service_no_data[twice_daily] dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].4 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', - 'dew_point': -15.6, - 'humidity': 75, - 'precipitation_probability': 89, - 'temperature': -12.2, - 'wind_bearing': 180, - 'wind_speed': 16.09, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].5 - dict({ - 'forecast': list([ - ]), + 'weather.abc': dict({ + 'forecast': list([ + ]), + }), }) # --- # name: test_forecast_service[get_forecasts] @@ -99,7 +48,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -119,7 +67,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -138,7 +85,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'is_daytime': False, @@ -158,7 +104,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -177,7 +122,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -202,7 +146,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, @@ -217,7 +160,6 @@ dict({ 'condition': 'lightning-rainy', 'datetime': '2019-08-12T20:00:00-04:00', - 'detailed_description': 'A detailed forecast.', 'dew_point': -15.6, 'humidity': 75, 'precipitation_probability': 89, diff --git 
a/tests/components/nws/test_weather.py b/tests/components/nws/test_weather.py index b4f4b5155a1..bbf808dbd1f 100644 --- a/tests/components/nws/test_weather.py +++ b/tests/components/nws/test_weather.py @@ -554,3 +554,83 @@ async def test_forecast_subscription_with_failing_coordinator( ) msg = await client.receive_json() assert not msg["success"] + + +@pytest.mark.parametrize( + ("forecast_type"), + [ + "hourly", + "twice_daily", + ], +) +async def test_detailed_forecast_service( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + mock_simple_nws, + no_sensor, + forecast_type: str, +) -> None: + """Test detailed forecast.""" + + entry = MockConfigEntry( + domain=nws.DOMAIN, + data=NWS_CONFIG, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + response = await hass.services.async_call( + nws.DOMAIN, + "get_forecasts_extra", + { + "entity_id": "weather.abc", + "type": forecast_type, + }, + blocking=True, + return_response=True, + ) + assert response == snapshot + + +@pytest.mark.parametrize( + ("forecast_type"), + [ + "hourly", + "twice_daily", + ], +) +async def test_detailed_forecast_service_no_data( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + snapshot: SnapshotAssertion, + mock_simple_nws, + no_sensor, + forecast_type: str, +) -> None: + """Test detailed forecast.""" + instance = mock_simple_nws.return_value + instance.forecast = None + instance.forecast_hourly = None + entry = MockConfigEntry( + domain=nws.DOMAIN, + data=NWS_CONFIG, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + response = await hass.services.async_call( + nws.DOMAIN, + "get_forecasts_extra", + { + "entity_id": "weather.abc", + "type": forecast_type, + }, + blocking=True, + return_response=True, + ) + assert response == snapshot diff --git a/tests/components/nx584/test_binary_sensor.py b/tests/components/nx584/test_binary_sensor.py index 5c57feb471b..9261521f850 100644 --- a/tests/components/nx584/test_binary_sensor.py +++ b/tests/components/nx584/test_binary_sensor.py @@ -216,8 +216,8 @@ def test_nx584_watcher_run_with_zone_events() -> None: """Return nothing twice, then some events.""" if empty_me: empty_me.pop() - else: - return fake_events + return None + return fake_events client = mock.MagicMock() fake_events = [ diff --git a/tests/components/obihai/__init__.py b/tests/components/obihai/__init__.py index d43aa6a9bb8..b88f0a5c874 100644 --- a/tests/components/obihai/__init__.py +++ b/tests/components/obihai/__init__.py @@ -32,3 +32,4 @@ def get_schema_suggestion(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] + return None diff --git a/tests/components/obihai/conftest.py b/tests/components/obihai/conftest.py index c4edfdedf65..ef54c12ba26 100644 --- a/tests/components/obihai/conftest.py +++ b/tests/components/obihai/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for Obihai.""" +from collections.abc import Generator from socket import gaierror from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/ollama/__init__.py b/tests/components/ollama/__init__.py index 22a576e94a4..6ad77bb2217 100644 --- a/tests/components/ollama/__init__.py +++ b/tests/components/ollama/__init__.py @@ -1,7 +1,7 @@ """Tests for the Ollama integration.""" 
from homeassistant.components import ollama -from homeassistant.components.ollama.const import DEFAULT_PROMPT +from homeassistant.helpers import llm TEST_USER_DATA = { ollama.CONF_URL: "http://localhost:11434", @@ -9,6 +9,6 @@ TEST_USER_DATA = { } TEST_OPTIONS = { - ollama.CONF_PROMPT: DEFAULT_PROMPT, + ollama.CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT, ollama.CONF_MAX_HISTORY: 2, } diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index db1689bd416..b28b8850cd5 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -5,7 +5,9 @@ from unittest.mock import patch import pytest from homeassistant.components import ollama +from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm from homeassistant.setup import async_setup_component from . import TEST_OPTIONS, TEST_USER_DATA @@ -25,6 +27,17 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: return entry +@pytest.fixture +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} + ) + return mock_config_entry + + @pytest.fixture async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfigEntry): """Initialize integration.""" @@ -35,6 +48,7 @@ async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfig ): assert await async_setup_component(hass, ollama.DOMAIN, {}) await hass.async_block_till_done() + yield @pytest.fixture(autouse=True) diff --git a/tests/components/ollama/snapshots/test_conversation.ambr b/tests/components/ollama/snapshots/test_conversation.ambr new file mode 100644 index 00000000000..e4dd7cd00bb --- /dev/null +++ b/tests/components/ollama/snapshots/test_conversation.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_unknown_hass_api + dict({ + 'conversation_id': None, + 'response': IntentResponse( + card=dict({ + }), + error_code=, + failed_results=list([ + ]), + intent=None, + intent_targets=list([ + ]), + language='en', + matched_states=list([ + ]), + reprompt=dict({ + }), + response_type=, + speech=dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Error preparing LLM API: API non-existing not found', + }), + }), + speech_slots=dict({ + }), + success_results=list([ + ]), + unmatched_states=list([ + ]), + ), + }) +# --- diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index b6f0be3c414..cb56b398342 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -1,21 +1,19 @@ """Tests for the Ollama integration.""" -from unittest.mock import AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch from ollama import Message, ResponseError import pytest +from syrupy.assertion import SnapshotAssertion +import voluptuous as vol from homeassistant.components import conversation, ollama from homeassistant.components.conversation import trace -from homeassistant.components.homeassistant.exposed_entities import async_expose_entity -from homeassistant.const import ATTR_FRIENDLY_NAME, MATCH_ALL +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import Context, HomeAssistant -from homeassistant.helpers import ( 
- area_registry as ar, - device_registry as dr, - entity_registry as er, - intent, -) +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent, llm from tests.common import MockConfigEntry @@ -25,9 +23,6 @@ async def test_chat( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component, - area_registry: ar.AreaRegistry, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, agent_id: str, ) -> None: """Test that the chat function is called with the appropriate arguments.""" @@ -35,48 +30,8 @@ async def test_chat( if agent_id is None: agent_id = mock_config_entry.entry_id - # Create some areas, devices, and entities - area_kitchen = area_registry.async_get_or_create("kitchen_id") - area_kitchen = area_registry.async_update(area_kitchen.id, name="kitchen") - area_bedroom = area_registry.async_get_or_create("bedroom_id") - area_bedroom = area_registry.async_update(area_bedroom.id, name="bedroom") - area_office = area_registry.async_get_or_create("office_id") - area_office = area_registry.async_update(area_office.id, name="office") - entry = MockConfigEntry() entry.add_to_hass(hass) - kitchen_device = device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - connections=set(), - identifiers={("demo", "id-1234")}, - ) - device_registry.async_update_device(kitchen_device.id, area_id=area_kitchen.id) - - kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") - kitchen_light = entity_registry.async_update_entity( - kitchen_light.entity_id, device_id=kitchen_device.id - ) - hass.states.async_set( - kitchen_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "kitchen light"} - ) - - bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") - bedroom_light = entity_registry.async_update_entity( - bedroom_light.entity_id, area_id=area_bedroom.id - ) - hass.states.async_set( - bedroom_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "bedroom light"} - ) - - # Hide the office light - office_light = entity_registry.async_get_or_create("light", "demo", "ABCD") - office_light = entity_registry.async_update_entity( - office_light.entity_id, area_id=area_office.id - ) - hass.states.async_set( - office_light.entity_id, "on", attributes={ATTR_FRIENDLY_NAME: "office light"} - ) - async_expose_entity(hass, conversation.DOMAIN, office_light.entity_id, False) with patch( "ollama.AsyncClient.chat", @@ -100,12 +55,6 @@ async def test_chat( Message({"role": "user", "content": "test message"}), ] - # Verify only exposed devices/areas are in prompt - assert "kitchen light" in prompt - assert "bedroom light" in prompt - assert "office light" not in prompt - assert "office" not in prompt - assert ( result.response.response_type == intent.IntentResponseType.ACTION_DONE ), result @@ -122,7 +71,254 @@ async def test_chat( ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] - assert "The current time is" in detail_event["data"]["messages"][0]["content"] + assert "Current time is" in detail_event["data"]["messages"][0]["content"] + + +async def test_template_variables( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template variables work.""" + context = Context(user_id="12345") + mock_user = Mock() + mock_user.id = "12345" + mock_user.name = "Test User" + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": ( + "The user name is {{ user_name }}. 
" + "The user id is {{ llm_context.context.user_id }}." + ), + }, + ) + with ( + patch("ollama.AsyncClient.list"), + patch( + "ollama.AsyncClient.chat", + return_value={"message": {"role": "assistant", "content": "test response"}}, + ) as mock_chat, + patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, context, agent_id=mock_config_entry.entry_id + ) + + assert ( + result.response.response_type == intent.IntentResponseType.ACTION_DONE + ), result + + args = mock_chat.call_args.kwargs + prompt = args["messages"][0]["content"] + + assert "The user name is Test User." in prompt + assert "The user id is 12345." in prompt + + +@pytest.mark.parametrize( + ("tool_args", "expected_tool_args"), + [ + ({"param1": "test_value"}, {"param1": "test_value"}), + ({"param1": 2}, {"param1": 2}), + ( + {"param1": "test_value", "floor": ""}, + {"param1": "test_value"}, # Omit empty arguments + ), + ( + {"domain": '["light"]'}, + {"domain": ["light"]}, # Repair invalid json arguments + ), + ( + {"domain": "['light']"}, + {"domain": "['light']"}, # Preserve invalid json that can't be parsed + ), + ], +) +@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") +async def test_function_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, + tool_args: dict[str, Any], + expected_tool_args: dict[str, Any], +) -> None: + """Test function call from the assistant.""" + agent_id = mock_config_entry_with_assist.entry_id + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + if message["role"] == "tool": + return { + "message": { + "role": "assistant", + "content": "I have successfully called the function", + } + } + + return { + "message": { + "role": "assistant", + "tool_calls": [ + { + "function": { + "name": "test_tool", + "arguments": tool_args, + } + } + ], + } + } + + with patch( + "ollama.AsyncClient.chat", + side_effect=completion_result, + ) as mock_chat: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert mock_chat.call_count == 2 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert ( + result.response.speech["plain"]["speech"] + == "I have successfully called the function" + ) + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args=expected_tool_args, + ), + llm.LLMContext( + platform="ollama", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +@patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") +async def test_function_exception( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call with exception.""" + agent_id = mock_config_entry_with_assist.entry_id + context = 
Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + if message["role"] == "tool": + return { + "message": { + "role": "assistant", + "content": "There was an error calling the function", + } + } + + return { + "message": { + "role": "assistant", + "tool_calls": [ + { + "function": { + "name": "test_tool", + "arguments": {"param1": "test_value"}, + } + } + ], + } + } + + with patch( + "ollama.AsyncClient.chat", + side_effect=completion_result, + ) as mock_chat: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert mock_chat.call_count == 2 + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert ( + result.response.speech["plain"]["speech"] + == "There was an error calling the function" + ) + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="ollama", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +async def test_unknown_hass_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_init_component, +) -> None: + """Test when we reference an API that no longer exists.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "non-existing", + }, + ) + await hass.async_block_till_done() + + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id + ) + + assert result == snapshot async def test_message_history_trimming( @@ -359,3 +555,26 @@ async def test_conversation_agent( mock_config_entry.entry_id ) assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 + + +async def test_conversation_agent_with_assist( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test OllamaConversationEntity.""" + agent = conversation.get_agent_manager(hass).async_get_agent( + mock_config_entry_with_assist.entry_id + ) + assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == conversation.ConversationEntityFeature.CONTROL + ) diff --git a/tests/components/onboarding/test_views.py b/tests/components/onboarding/test_views.py index e9ba720adb3..dd53d6cbce6 100644 --- a/tests/components/onboarding/test_views.py +++ b/tests/components/onboarding/test_views.py @@ -1,13 +1,13 @@ """Test the onboarding views.""" import asyncio +from collections.abc import AsyncGenerator from http import HTTPStatus import os from typing import Any from unittest.mock import Mock, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant.components import onboarding from homeassistant.components.onboarding import const, views @@ -28,7 +28,7 @@ from 
tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def auth_active(hass): +def auth_active(hass: HomeAssistant) -> None: """Ensure auth is always active.""" hass.loop.run_until_complete( register_auth_provider(hass, {"type": "homeassistant"}) diff --git a/tests/components/ondilo_ico/conftest.py b/tests/components/ondilo_ico/conftest.py index 6a03d6961c2..a847c1df069 100644 --- a/tests/components/ondilo_ico/conftest.py +++ b/tests/components/ondilo_ico/conftest.py @@ -1,10 +1,10 @@ """Provide basic Ondilo fixture.""" +from collections.abc import Generator from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ondilo_ico.const import DOMAIN diff --git a/tests/components/ondilo_ico/snapshots/test_init.ambr b/tests/components/ondilo_ico/snapshots/test_init.ambr index 355c5902722..44008ac907e 100644 --- a/tests/components/ondilo_ico/snapshots/test_init.ambr +++ b/tests/components/ondilo_ico/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', + 'model_id': None, 'name': 'Pool 1', 'name_by_user': None, 'primary_config_entry': , @@ -52,6 +53,7 @@ }), 'manufacturer': 'Ondilo', 'model': 'ICO', + 'model_id': None, 'name': 'Pool 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/conftest.py b/tests/components/onewire/conftest.py index 47b50ab10e0..65a86b58f2f 100644 --- a/tests/components/onewire/conftest.py +++ b/tests/components/onewire/conftest.py @@ -1,10 +1,10 @@ """Provide common 1-Wire fixtures.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pyownet.protocol import ConnError import pytest -from typing_extensions import Generator from homeassistant.components.onewire.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/onewire/snapshots/test_binary_sensor.ambr b/tests/components/onewire/snapshots/test_binary_sensor.ambr index b3d330291ab..450cc4c7486 100644 --- a/tests/components/onewire/snapshots/test_binary_sensor.ambr +++ b/tests/components/onewire/snapshots/test_binary_sensor.ambr @@ -34,6 +34,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -75,6 +76,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -116,6 +118,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -257,6 +260,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -298,6 +302,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -327,6 +332,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -368,6 +374,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -409,6 +416,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 
'26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -450,6 +458,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -491,6 +500,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -532,6 +542,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -573,6 +584,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -966,6 +978,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1007,6 +1020,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1136,6 +1150,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1177,6 +1192,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1218,6 +1234,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1259,6 +1276,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1300,6 +1318,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1341,6 +1360,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1382,6 +1402,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 'primary_config_entry': , @@ -1423,6 +1444,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/snapshots/test_sensor.ambr b/tests/components/onewire/snapshots/test_sensor.ambr index acf9ea6a8c8..5ad4cf2ef4b 100644 --- a/tests/components/onewire/snapshots/test_sensor.ambr +++ b/tests/components/onewire/snapshots/test_sensor.ambr @@ -34,6 +34,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -75,6 +76,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -165,6 +167,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -316,6 +319,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -453,6 +457,7 @@ }), 'manufacturer': 
'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -482,6 +487,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -619,6 +625,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -709,6 +716,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1289,6 +1297,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1379,6 +1388,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1469,6 +1479,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -1559,6 +1570,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1600,6 +1612,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1837,6 +1850,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1878,6 +1892,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1968,6 +1983,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -2058,6 +2074,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -2295,6 +2312,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -2434,6 +2452,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -3014,6 +3033,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -3202,6 +3222,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 'primary_config_entry': , @@ -3439,6 +3460,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/snapshots/test_switch.ambr b/tests/components/onewire/snapshots/test_switch.ambr index d6cbb6f3fef..3bc7a2d3def 100644 --- a/tests/components/onewire/snapshots/test_switch.ambr +++ b/tests/components/onewire/snapshots/test_switch.ambr @@ -34,6 +34,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2405', + 'model_id': None, 'name': '05.111111111111', 
'name_by_user': None, 'primary_config_entry': , @@ -119,6 +120,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18S20', + 'model_id': None, 'name': '10.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -160,6 +162,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2406', + 'model_id': None, 'name': '12.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -389,6 +392,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -430,6 +434,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2409', + 'model_id': None, 'name': '1F.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -459,6 +464,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2423', + 'model_id': None, 'name': '1D.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -500,6 +506,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1822', + 'model_id': None, 'name': '22.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -541,6 +548,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': '26.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -626,6 +634,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -667,6 +676,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -708,6 +718,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS18B20', + 'model_id': None, 'name': '28.222222222223', 'name_by_user': None, 'primary_config_entry': , @@ -749,6 +760,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2408', + 'model_id': None, 'name': '29.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1494,6 +1506,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2760', + 'model_id': None, 'name': '30.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1535,6 +1548,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2413', + 'model_id': None, 'name': '3A.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1664,6 +1678,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS1825', + 'model_id': None, 'name': '3B.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1705,6 +1720,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS28EA00', + 'model_id': None, 'name': '42.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1746,6 +1762,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0068', + 'model_id': None, 'name': '7E.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1787,6 +1804,7 @@ }), 'manufacturer': 'Embedded Data Systems', 'model': 'EDS0066', + 'model_id': None, 'name': '7E.222222222222', 'name_by_user': None, 'primary_config_entry': , @@ -1828,6 +1846,7 @@ }), 'manufacturer': 'Maxim Integrated', 'model': 'DS2438', + 'model_id': None, 'name': 'A6.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1913,6 +1932,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HobbyBoards_EF', + 'model_id': None, 'name': 'EF.111111111111', 'name_by_user': None, 'primary_config_entry': , @@ -1954,6 +1974,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_MOISTURE_METER', + 'model_id': None, 'name': 'EF.111111111112', 'name_by_user': None, 
'primary_config_entry': , @@ -2347,6 +2368,7 @@ }), 'manufacturer': 'Hobby Boards', 'model': 'HB_HUB', + 'model_id': None, 'name': 'EF.111111111113', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/onewire/test_binary_sensor.py b/tests/components/onewire/test_binary_sensor.py index 8b1129529d5..31895f705ff 100644 --- a/tests/components/onewire/test_binary_sensor.py +++ b/tests/components/onewire/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for 1-Wire binary sensors.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_diagnostics.py b/tests/components/onewire/test_diagnostics.py index 62b045c4516..ecdae859597 100644 --- a/tests/components/onewire/test_diagnostics.py +++ b/tests/components/onewire/test_diagnostics.py @@ -1,10 +1,10 @@ """Test 1-Wire diagnostics.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_sensor.py b/tests/components/onewire/test_sensor.py index df0a81920c9..ba0e21701f8 100644 --- a/tests/components/onewire/test_sensor.py +++ b/tests/components/onewire/test_sensor.py @@ -1,5 +1,6 @@ """Tests for 1-Wire sensors.""" +from collections.abc import Generator from copy import deepcopy import logging from unittest.mock import MagicMock, _patch_dict, patch @@ -7,7 +8,6 @@ from unittest.mock import MagicMock, _patch_dict, patch from pyownet.protocol import OwnetError import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/onewire/test_switch.py b/tests/components/onewire/test_switch.py index b1b8e5ddbd0..936e83f66ec 100644 --- a/tests/components/onewire/test_switch.py +++ b/tests/components/onewire/test_switch.py @@ -1,10 +1,10 @@ """Tests for 1-Wire switches.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/onvif/test_diagnostics.py b/tests/components/onvif/test_diagnostics.py index d58c8008ea6..ce8febe2341 100644 --- a/tests/components/onvif/test_diagnostics.py +++ b/tests/components/onvif/test_diagnostics.py @@ -1,6 +1,7 @@ """Test ONVIF diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -19,4 +20,6 @@ async def test_diagnostics( entry, _, _ = await setup_onvif_integration(hass) - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/open_meteo/conftest.py b/tests/components/open_meteo/conftest.py index 0d3e1274693..22138846915 100644 --- a/tests/components/open_meteo/conftest.py 
+++ b/tests/components/open_meteo/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch from open_meteo import Forecast import pytest -from typing_extensions import Generator from homeassistant.components.open_meteo.const import DOMAIN from homeassistant.const import CONF_ZONE diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 1008482847c..e0665bc449f 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -27,6 +27,33 @@ from homeassistant.util import ulid from tests.common import MockConfigEntry +async def test_entity( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test entity properties.""" + state = hass.states.get("conversation.openai") + assert state + assert state.attributes["supported_features"] == 0 + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "assist", + }, + ) + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + state = hass.states.get("conversation.openai") + assert state + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + + async def test_error_handling( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component ) -> None: @@ -267,7 +294,7 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -276,6 +303,7 @@ async def test_function_call( "Today's date is 2024-06-03." 
in trace_events[1]["data"]["messages"][0]["content"] ) + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] # Call it again, make sure we have updated prompt with ( @@ -493,6 +521,8 @@ async def test_unknown_hass_api( }, ) + await hass.async_block_till_done() + result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) diff --git a/tests/components/openai_conversation/test_init.py b/tests/components/openai_conversation/test_init.py index c9431aa1083..d78ce398c92 100644 --- a/tests/components/openai_conversation/test_init.py +++ b/tests/components/openai_conversation/test_init.py @@ -60,33 +60,6 @@ from tests.common import MockConfigEntry "style": "natural", }, ), - ( - {"prompt": "Picture of a dog", "size": "256"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), - ( - {"prompt": "Picture of a dog", "size": "512"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), - ( - {"prompt": "Picture of a dog", "size": "1024"}, - { - "prompt": "Picture of a dog", - "size": "1024x1024", - "quality": "standard", - "style": "vivid", - }, - ), ], ) async def test_generate_image_service( diff --git a/tests/components/openexchangerates/conftest.py b/tests/components/openexchangerates/conftest.py index 6bd7da2c7af..770432ebac3 100644 --- a/tests/components/openexchangerates/conftest.py +++ b/tests/components/openexchangerates/conftest.py @@ -1,9 +1,9 @@ """Provide common fixtures for tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.openexchangerates.const import DOMAIN diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index 30ea619d646..ec06c662201 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ b/tests/components/openexchangerates/test_config_flow.py @@ -1,6 +1,7 @@ """Test the Open Exchange Rates config flow.""" import asyncio +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch @@ -9,7 +10,6 @@ from aioopenexchangerates import ( OpenExchangeRatesClientError, ) import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.openexchangerates.const import DOMAIN diff --git a/tests/components/opengarage/conftest.py b/tests/components/opengarage/conftest.py index c960e723289..2367692096b 100644 --- a/tests/components/opengarage/conftest.py +++ b/tests/components/opengarage/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.opengarage.const import CONF_DEVICE_KEY, DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT, CONF_VERIFY_SSL diff --git a/tests/components/opensky/conftest.py b/tests/components/opensky/conftest.py index c48f3bec8d8..4664c48ef9e 100644 --- a/tests/components/opensky/conftest.py +++ b/tests/components/opensky/conftest.py @@ -1,10 +1,10 @@ """Configure tests for the OpenSky integration.""" +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, patch import pytest from python_opensky import StatesResponse -from typing_extensions 
import AsyncGenerator, Generator from homeassistant.components.opensky.const import ( CONF_ALTITUDE, diff --git a/tests/components/openuv/conftest.py b/tests/components/openuv/conftest.py index 69563c94c64..cc344d25ccb 100644 --- a/tests/components/openuv/conftest.py +++ b/tests/components/openuv/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for OpenUV.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN from homeassistant.const import ( diff --git a/tests/components/openuv/test_diagnostics.py b/tests/components/openuv/test_diagnostics.py index 4b5114bccd1..4fe851eea53 100644 --- a/tests/components/openuv/test_diagnostics.py +++ b/tests/components/openuv/test_diagnostics.py @@ -4,6 +4,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -35,6 +36,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "protection_window": { diff --git a/tests/components/opower/test_config_flow.py b/tests/components/opower/test_config_flow.py index a236494f2c9..8134539b0a5 100644 --- a/tests/components/opower/test_config_flow.py +++ b/tests/components/opower/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Opower config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from opower import CannotConnect, InvalidAuth import pytest -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.opower.const import DOMAIN diff --git a/tests/components/oralb/conftest.py b/tests/components/oralb/conftest.py index fa4ba463357..c757d79a78e 100644 --- a/tests/components/oralb/conftest.py +++ b/tests/components/oralb/conftest.py @@ -1,9 +1,9 @@ """OralB session fixtures.""" +from collections.abc import Generator from unittest import mock import pytest -from typing_extensions import Generator class MockServices: diff --git a/tests/components/otbr/test_websocket_api.py b/tests/components/otbr/test_websocket_api.py index df55d38d3b7..5361b56c688 100644 --- a/tests/components/otbr/test_websocket_api.py +++ b/tests/components/otbr/test_websocket_api.py @@ -36,11 +36,14 @@ async def test_get_info( websocket_client, ) -> None: """Test async_get_info.""" + extended_pan_id = "ABCD1234" with ( patch( "python_otbr_api.OTBR.get_active_dataset", - return_value=python_otbr_api.ActiveDataSet(channel=16), + return_value=python_otbr_api.ActiveDataSet( + channel=16, extended_pan_id=extended_pan_id + ), ), patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 @@ -58,12 +61,16 @@ async def test_get_info( msg = await websocket_client.receive_json() assert msg["success"] + extended_address = TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex() assert msg["result"] == { - "url": BASE_URL, - "active_dataset_tlvs": DATASET_CH16.hex().lower(), - "channel": 16, - "border_agent_id": TEST_BORDER_AGENT_ID.hex(), - "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + extended_address: { + "url": BASE_URL, + "active_dataset_tlvs": DATASET_CH16.hex().lower(), + "channel": 16, + 
"border_agent_id": TEST_BORDER_AGENT_ID.hex(), + "extended_address": extended_address, + "extended_pan_id": extended_pan_id.lower(), + } } @@ -121,6 +128,10 @@ async def test_create_network( patch( "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 ) as get_active_dataset_tlvs_mock, + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -129,7 +140,12 @@ async def test_create_network( return_value=0x1234, ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert msg["success"] @@ -156,7 +172,9 @@ async def test_create_network_no_entry( """Test create network.""" await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + {"type": "otbr/create_network", "extended_address": "blah"} + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -170,11 +188,22 @@ async def test_create_network_fails_1( websocket_client, ) -> None: """Test create network.""" - with patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, + ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -197,8 +226,17 @@ async def test_create_network_fails_2( side_effect=python_otbr_api.OTBRError, ), patch("python_otbr_api.OTBR.factory_reset"), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -223,8 +261,17 @@ async def test_create_network_fails_3( patch( "python_otbr_api.OTBR.factory_reset", ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -248,8 +295,17 @@ async def test_create_network_fails_4( patch( "python_otbr_api.OTBR.factory_reset", ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": 
TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -268,8 +324,17 @@ async def test_create_network_fails_5( patch("python_otbr_api.OTBR.create_active_dataset"), patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=None), patch("python_otbr_api.OTBR.factory_reset"), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] @@ -291,14 +356,69 @@ async def test_create_network_fails_6( "python_otbr_api.OTBR.factory_reset", side_effect=python_otbr_api.OTBRError, ), + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), ): - await websocket_client.send_json_auto_id({"type": "otbr/create_network"}) + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "factory_reset_failed" +async def test_create_network_fails_7( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test create network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "get_extended_address_failed" + + +async def test_create_network_fails_8( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test create network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/create_network", + "extended_address": "blah", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "unknown_router" + + async def test_set_network( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -312,6 +432,10 @@ async def test_set_network( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_active_dataset_tlvs" ) as set_active_dataset_tlvs_mock, @@ -320,6 +444,7 @@ async def test_set_network( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -345,6 +470,7 @@ async def test_set_network_no_entry( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": "abc", } ) @@ -368,14 +494,19 @@ async def test_set_network_channel_conflict( multiprotocol_addon_manager_mock.async_get_channel.return_value = 15 - await websocket_client.send_json_auto_id( - { - "type": 
"otbr/set_network", - "dataset_id": dataset_id, - } - ) + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": dataset_id, + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "channel_conflict" @@ -389,14 +520,19 @@ async def test_set_network_unknown_dataset( ) -> None: """Test set network.""" - await websocket_client.send_json_auto_id( - { - "type": "otbr/set_network", - "dataset_id": "abc", - } - ) + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": "abc", + } + ) - msg = await websocket_client.receive_json() + msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "unknown_dataset" @@ -413,13 +549,20 @@ async def test_set_network_fails_1( dataset_store = await thread.dataset_store.async_get_store(hass) dataset_id = list(dataset_store.datasets)[1] - with patch( - "python_otbr_api.OTBR.set_enabled", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch( + "python_otbr_api.OTBR.set_enabled", + side_effect=python_otbr_api.OTBRError, + ), ): await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -441,6 +584,10 @@ async def test_set_network_fails_2( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_enabled", ), @@ -452,6 +599,7 @@ async def test_set_network_fails_2( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -473,6 +621,10 @@ async def test_set_network_fails_3( dataset_id = list(dataset_store.datasets)[1] with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), patch( "python_otbr_api.OTBR.set_enabled", side_effect=[None, python_otbr_api.OTBRError], @@ -484,6 +636,7 @@ async def test_set_network_fails_3( await websocket_client.send_json_auto_id( { "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), "dataset_id": dataset_id, } ) @@ -493,6 +646,54 @@ async def test_set_network_fails_3( assert msg["error"]["code"] == "set_enabled_failed" +async def test_set_network_fails_4( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "dataset_id": "abc", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == 
"get_extended_address_failed" + + +async def test_set_network_fails_5( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set network.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_network", + "extended_address": "blah", + "dataset_id": "abc", + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "unknown_router" + + async def test_set_channel( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -501,9 +702,19 @@ async def test_set_channel( ) -> None: """Test set channel.""" - with patch("python_otbr_api.OTBR.set_channel"): + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("python_otbr_api.OTBR.set_channel"), + ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -519,9 +730,19 @@ async def test_set_channel_multiprotocol( ) -> None: """Test set channel.""" - with patch("python_otbr_api.OTBR.set_channel"): + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch("python_otbr_api.OTBR.set_channel"), + ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -538,7 +759,11 @@ async def test_set_channel_no_entry( await async_setup_component(hass, "otbr", {}) websocket_client = await hass_ws_client(hass) await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() @@ -546,21 +771,79 @@ async def test_set_channel_no_entry( assert msg["error"]["code"] == "not_loaded" -async def test_set_channel_fails( +async def test_set_channel_fails_1( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_thread, websocket_client, ) -> None: """Test set channel.""" - with patch( - "python_otbr_api.OTBR.set_channel", - side_effect=python_otbr_api.OTBRError, + with ( + patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ), + patch( + "python_otbr_api.OTBR.set_channel", + side_effect=python_otbr_api.OTBRError, + ), ): await websocket_client.send_json_auto_id( - {"type": "otbr/set_channel", "channel": 12} + { + "type": "otbr/set_channel", + "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } ) msg = await websocket_client.receive_json() assert not msg["success"] assert msg["error"]["code"] == "set_channel_failed" + + +async def test_set_channel_fails_2( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set channel.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + side_effect=python_otbr_api.OTBRError, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_channel", 
+ "extended_address": TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), + "channel": 12, + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "get_extended_address_failed" + + +async def test_set_channel_fails_3( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + otbr_config_entry_multipan, + websocket_client, +) -> None: + """Test set channel.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ): + await websocket_client.send_json_auto_id( + { + "type": "otbr/set_channel", + "extended_address": "blah", + "channel": 12, + } + ) + msg = await websocket_client.receive_json() + + assert not msg["success"] + assert msg["error"]["code"] == "unknown_router" diff --git a/tests/components/otp/conftest.py b/tests/components/otp/conftest.py index 7443d772c69..7926be1e48e 100644 --- a/tests/components/otp/conftest.py +++ b/tests/components/otp/conftest.py @@ -14,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.otp.async_setup_entry", return_value=True @@ -23,7 +23,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_pyotp() -> Generator[MagicMock, None, None]: +def mock_pyotp() -> Generator[MagicMock]: """Mock a pyotp.""" with ( patch( diff --git a/tests/components/ourgroceries/__init__.py b/tests/components/ourgroceries/__init__.py index 6f90cb7ea1b..4ebbea46229 100644 --- a/tests/components/ourgroceries/__init__.py +++ b/tests/components/ourgroceries/__init__.py @@ -1,6 +1,10 @@ """Tests for the OurGroceries integration.""" +from typing import Any -def items_to_shopping_list(items: list, version_id: str = "1") -> dict[dict[list]]: + +def items_to_shopping_list( + items: list, version_id: str = "1" +) -> dict[str, dict[str, Any]]: """Convert a list of items into a shopping list.""" return {"list": {"versionId": version_id, "items": items}} diff --git a/tests/components/ourgroceries/conftest.py b/tests/components/ourgroceries/conftest.py index bc8c632b511..b3fb4e9bcc6 100644 --- a/tests/components/ourgroceries/conftest.py +++ b/tests/components/ourgroceries/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the OurGroceries tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.ourgroceries import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/ourgroceries/test_todo.py b/tests/components/ourgroceries/test_todo.py index 672e2e14447..d364881b624 100644 --- a/tests/components/ourgroceries/test_todo.py +++ b/tests/components/ourgroceries/test_todo.py @@ -7,8 +7,14 @@ from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.ourgroceries.coordinator import SCAN_INTERVAL -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -69,9 +75,9 @@ async def 
test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda"}, - target={"entity_id": "todo.test_list"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) @@ -108,9 +114,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "completed"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -132,9 +138,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "status": "needs_action"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.toggle_item_crossed_off.called @@ -181,9 +187,9 @@ async def test_update_todo_item_summary( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "12345", "rename": "Milk"}, - target={"entity_id": "todo.test_list"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "12345", ATTR_RENAME: "Milk"}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.change_item_on_list @@ -218,9 +224,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["12345", "54321"]}, - target={"entity_id": "todo.test_list"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["12345", "54321"]}, + target={ATTR_ENTITY_ID: "todo.test_list"}, blocking=True, ) assert ourgroceries.remove_item_from_list.call_count == 2 diff --git a/tests/components/overkiz/conftest.py b/tests/components/overkiz/conftest.py index 8ab26e3587b..151d0719ddb 100644 --- a/tests/components/overkiz/conftest.py +++ b/tests/components/overkiz/conftest.py @@ -1,9 +1,9 @@ """Configuration for overkiz tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.overkiz.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index 8246a7f51ac..0648a94c70b 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -285,8 +285,6 @@ BAD_MESSAGE = {"_type": "unsupported", "tst": 1} BAD_JSON_PREFIX = "--$this is bad json#--" BAD_JSON_SUFFIX = "** and it ends here ^^" -# pylint: disable=len-as-condition - @pytest.fixture def setup_comp( diff --git a/tests/components/permobil/conftest.py b/tests/components/permobil/conftest.py index ed6a843b206..d3630d3f366 100644 --- a/tests/components/permobil/conftest.py +++ b/tests/components/permobil/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the MyPermobil tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from mypermobil import MyPermobil import pytest -from typing_extensions import Generator from .const import MOCK_REGION_NAME, MOCK_TOKEN, MOCK_URL diff --git a/tests/components/persistent_notification/conftest.py b/tests/components/persistent_notification/conftest.py index d665c0075b3..29ba5a6008a 100644 --- a/tests/components/persistent_notification/conftest.py +++ 
b/tests/components/persistent_notification/conftest.py @@ -3,10 +3,11 @@ import pytest import homeassistant.components.persistent_notification as pn +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture(autouse=True) -async def setup_integration(hass): +async def setup_integration(hass: HomeAssistant) -> None: """Set up persistent notification integration.""" assert await async_setup_component(hass, pn.DOMAIN, {}) diff --git a/tests/components/philips_js/conftest.py b/tests/components/philips_js/conftest.py index b6c78fe9e5e..4a79fce85a2 100644 --- a/tests/components/philips_js/conftest.py +++ b/tests/components/philips_js/conftest.py @@ -1,16 +1,18 @@ """Standard setup for tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, create_autospec, patch from haphilipsjs import PhilipsTV import pytest -from typing_extensions import Generator from homeassistant.components.philips_js.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from . import MOCK_CONFIG, MOCK_ENTITY_ID, MOCK_NAME, MOCK_SERIAL_NO, MOCK_SYSTEM -from tests.common import MockConfigEntry, mock_device_registry +from tests.common import MockConfigEntry @pytest.fixture @@ -27,11 +29,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture(autouse=True) -async def setup_notification(hass): - """Configure notification system.""" - - @pytest.fixture(autouse=True) def mock_tv(): """Disable component actual use.""" @@ -62,7 +59,7 @@ def mock_tv(): @pytest.fixture -async def mock_config_entry(hass): +async def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Get standard player.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_CONFIG, title=MOCK_NAME, unique_id=MOCK_SERIAL_NO @@ -72,13 +69,7 @@ async def mock_config_entry(hass): @pytest.fixture -def mock_device_reg(hass): - """Get standard device.""" - return mock_device_registry(hass) - - -@pytest.fixture -async def mock_entity(hass, mock_device_reg, mock_config_entry): +async def mock_entity(hass: HomeAssistant, mock_config_entry: MockConfigEntry) -> str: """Get standard player.""" assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -86,9 +77,13 @@ async def mock_entity(hass, mock_device_reg, mock_config_entry): @pytest.fixture -def mock_device(hass, mock_device_reg, mock_entity, mock_config_entry): +def mock_device( + device_registry: dr.DeviceRegistry, + mock_entity: str, + mock_config_entry: MockConfigEntry, +) -> dr.DeviceEntry: """Get standard device.""" - return mock_device_reg.async_get_or_create( + return device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, identifiers={(DOMAIN, MOCK_SERIAL_NO)}, ) diff --git a/tests/components/philips_js/test_device_trigger.py b/tests/components/philips_js/test_device_trigger.py index b9b7439d2fa..8f2e5543f1e 100644 --- a/tests/components/philips_js/test_device_trigger.py +++ b/tests/components/philips_js/test_device_trigger.py @@ -9,7 +9,7 @@ from homeassistant.components.philips_js.const import DOMAIN from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service +from tests.common import async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -17,12 +17,6 @@ def 
stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: """Test we get the expected triggers.""" expected_triggers = [ @@ -42,7 +36,11 @@ async def test_get_triggers(hass: HomeAssistant, mock_device) -> None: async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, calls: list[ServiceCall], mock_tv, mock_entity, mock_device + hass: HomeAssistant, + service_calls: list[ServiceCall], + mock_tv, + mock_entity, + mock_device, ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -80,6 +78,10 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == mock_device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[0].domain == "media_player" + assert service_calls[0].service == "turn_on" + assert service_calls[1].domain == "test" + assert service_calls[1].service == "automation" + assert service_calls[1].data["some"] == mock_device.id + assert service_calls[1].data["id"] == 0 diff --git a/tests/components/philips_js/test_diagnostics.py b/tests/components/philips_js/test_diagnostics.py index cb3235b9780..d61546e52c3 100644 --- a/tests/components/philips_js/test_diagnostics.py +++ b/tests/components/philips_js/test_diagnostics.py @@ -63,4 +63,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/pi_hole/test_diagnostics.py b/tests/components/pi_hole/test_diagnostics.py index c9fc9a0a9b8..8d5a83e4622 100644 --- a/tests/components/pi_hole/test_diagnostics.py +++ b/tests/components/pi_hole/test_diagnostics.py @@ -1,6 +1,7 @@ """Test pi_hole component.""" from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components import pi_hole from homeassistant.core import HomeAssistant @@ -28,4 +29,6 @@ async def test_diagnostics( await hass.async_block_till_done() - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index cdd30967058..2db5bc90159 100644 --- a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,7 +5,8 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import DOMAIN +from homeassistant.components.todo import ATTR_ITEM, DOMAIN, TodoServices +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,9 +92,9 @@ async def test_create_todo_list_item( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) @@ -119,8 +120,8 @@ async def test_create_todo_list_item_not_found( with 
pytest.raises(ServiceValidationError): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "Melk"}, - target={"entity_id": ENTITY_ID}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Melk"}, + target={ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, ) diff --git a/tests/components/pilight/test_sensor.py b/tests/components/pilight/test_sensor.py index 97e031736e5..9f529117642 100644 --- a/tests/components/pilight/test_sensor.py +++ b/tests/components/pilight/test_sensor.py @@ -12,7 +12,7 @@ from tests.common import assert_setup_component, mock_component @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "pilight") diff --git a/tests/components/ping/test_device_tracker.py b/tests/components/ping/test_device_tracker.py index 5aa425226b3..4a5d6ba94ed 100644 --- a/tests/components/ping/test_device_tracker.py +++ b/tests/components/ping/test_device_tracker.py @@ -1,12 +1,12 @@ """Test the binary sensor platform of ping.""" +from collections.abc import Generator from datetime import timedelta from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory from icmplib import Host import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/plaato/test_config_flow.py b/tests/components/plaato/test_config_flow.py index efda354f20d..ceadab7f832 100644 --- a/tests/components/plaato/test_config_flow.py +++ b/tests/components/plaato/test_config_flow.py @@ -64,8 +64,8 @@ async def test_show_config_form_device_type_airlock(hass: HomeAssistant) -> None assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert result["data_schema"].schema.get(CONF_TOKEN) == str - assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) == bool + assert result["data_schema"].schema.get(CONF_TOKEN) is str + assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is bool async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: @@ -78,7 +78,7 @@ async def test_show_config_form_device_type_keg(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "api_method" - assert result["data_schema"].schema.get(CONF_TOKEN) == str + assert result["data_schema"].schema.get(CONF_TOKEN) is str assert result["data_schema"].schema.get(CONF_USE_WEBHOOK) is None diff --git a/tests/components/plex/conftest.py b/tests/components/plex/conftest.py index a061d9c1105..53c032cb08b 100644 --- a/tests/components/plex/conftest.py +++ b/tests/components/plex/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Plex tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock -from typing_extensions import Generator from homeassistant.components.plex.const import DOMAIN, PLEX_SERVER_CONFIG, SERVERS from homeassistant.const import CONF_URL diff --git a/tests/components/plex/test_update.py b/tests/components/plex/test_update.py index 942162665af..7ad2481a726 100644 --- a/tests/components/plex/test_update.py +++ b/tests/components/plex/test_update.py @@ -9,7 +9,8 @@ from homeassistant.components.update import ( SERVICE_INSTALL, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant, HomeAssistantError +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError 
from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry, async_fire_time_changed diff --git a/tests/components/plugwise/conftest.py b/tests/components/plugwise/conftest.py index 83826a0a543..ec857a965e5 100644 --- a/tests/components/plugwise/conftest.py +++ b/tests/components/plugwise/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from plugwise import PlugwiseData import pytest -from typing_extensions import Generator from homeassistant.components.plugwise.const import DOMAIN from homeassistant.const import ( diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index c91e4d37ba6..70cef16bcdc 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -14,7 +14,7 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -196,7 +196,7 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/poolsense/conftest.py b/tests/components/poolsense/conftest.py index ac16ef23ff3..6a842df7cfd 100644 --- a/tests/components/poolsense/conftest.py +++ b/tests/components/poolsense/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Poolsense tests.""" +from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.poolsense.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 499d1a5df14..12643c39dfa 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -16,6 +16,7 @@ from homeassistant.components import ( counter, cover, device_tracker, + fan, humidifier, input_boolean, input_number, @@ -30,11 +31,23 @@ from homeassistant.components import ( ) from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, + ATTR_FAN_MODE, + ATTR_FAN_MODES, ATTR_HUMIDITY, ATTR_HVAC_ACTION, + ATTR_HVAC_MODES, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, ) +from homeassistant.components.fan import ( + ATTR_DIRECTION, + ATTR_OSCILLATING, + ATTR_PERCENTAGE, + ATTR_PRESET_MODE, + ATTR_PRESET_MODES, + DIRECTION_FORWARD, + DIRECTION_REVERSE, +) from homeassistant.components.humidifier import ATTR_AVAILABLE_MODES from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( @@ -395,6 +408,18 @@ async def test_climate( 'entity="climate.fritzdect",' 'friendly_name="Fritz!DECT"} 0.0' in body ) + assert ( + 'climate_preset_mode{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="away"} 1.0' in body + ) + assert ( + 'climate_fan_mode{domain="climate",' + 'entity="climate.ecobee",' + 'friendly_name="Ecobee",' + 'mode="auto"} 1.0' in body + ) 
@pytest.mark.parametrize("namespace", [""]) @@ -562,6 +587,51 @@ async def test_lock( ) +@pytest.mark.parametrize("namespace", [""]) +async def test_fan( + client: ClientSessionGenerator, fan_entities: dict[str, er.RegistryEntry] +) -> None: + """Test prometheus metrics for fan.""" + body = await generate_latest_metrics(client) + + assert ( + 'fan_state{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 1.0' in body + ) + + assert ( + 'fan_speed_percent{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 33.0' in body + ) + + assert ( + 'fan_is_oscillating{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 1.0' in body + ) + + assert ( + 'fan_direction_reversed{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1"} 0.0' in body + ) + + assert ( + 'fan_preset_mode{domain="fan",' + 'entity="fan.fan_1",' + 'friendly_name="Fan 1",' + 'mode="LO"} 1.0' in body + ) + + assert ( + 'fan_direction_reversed{domain="fan",' + 'entity="fan.fan_2",' + 'friendly_name="Reverse Fan"} 1.0' in body + ) + + @pytest.mark.parametrize("namespace", [""]) async def test_cover( client: ClientSessionGenerator, cover_entities: dict[str, er.RegistryEntry] @@ -1359,6 +1429,11 @@ async def climate_fixture( ATTR_TARGET_TEMP_LOW: 21, ATTR_TARGET_TEMP_HIGH: 24, ATTR_HVAC_ACTION: climate.HVACAction.COOLING, + ATTR_HVAC_MODES: ["off", "heat", "cool", "heat_cool"], + ATTR_PRESET_MODE: "away", + ATTR_PRESET_MODES: ["away", "home", "sleep"], + ATTR_FAN_MODE: "auto", + ATTR_FAN_MODES: ["auto", "on"], } set_state_with_entry( hass, climate_2, climate.HVACAction.HEATING, climate_2_attributes @@ -1788,6 +1863,46 @@ async def switch_fixture( return data +@pytest.fixture(name="fan_entities") +async def fan_fixture( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> dict[str, er.RegistryEntry]: + """Simulate fan entities.""" + data = {} + fan_1 = entity_registry.async_get_or_create( + domain=fan.DOMAIN, + platform="test", + unique_id="fan_1", + suggested_object_id="fan_1", + original_name="Fan 1", + ) + fan_1_attributes = { + ATTR_DIRECTION: DIRECTION_FORWARD, + ATTR_OSCILLATING: True, + ATTR_PERCENTAGE: 33, + ATTR_PRESET_MODE: "LO", + ATTR_PRESET_MODES: ["LO", "OFF", "HI"], + } + set_state_with_entry(hass, fan_1, STATE_ON, fan_1_attributes) + data["fan_1"] = fan_1 + data["fan_1_attributes"] = fan_1_attributes + + fan_2 = entity_registry.async_get_or_create( + domain=fan.DOMAIN, + platform="test", + unique_id="fan_2", + suggested_object_id="fan_2", + original_name="Reverse Fan", + ) + fan_2_attributes = {ATTR_DIRECTION: DIRECTION_REVERSE} + set_state_with_entry(hass, fan_2, STATE_ON, fan_2_attributes) + data["fan_2"] = fan_2 + data["fan_2_attributes"] = fan_2_attributes + + await hass.async_block_till_done() + return data + + @pytest.fixture(name="person_entities") async def person_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry diff --git a/tests/components/prosegur/test_alarm_control_panel.py b/tests/components/prosegur/test_alarm_control_panel.py index b65b86b3049..f66d070f218 100644 --- a/tests/components/prosegur/test_alarm_control_panel.py +++ b/tests/components/prosegur/test_alarm_control_panel.py @@ -1,10 +1,10 @@ """Tests for the Prosegur alarm control panel device.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from pyprosegur.installation import Status import pytest -from typing_extensions import Generator from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN from homeassistant.const 
import ( diff --git a/tests/components/proximity/test_diagnostics.py b/tests/components/proximity/test_diagnostics.py index a60c592fcab..e4f22236808 100644 --- a/tests/components/proximity/test_diagnostics.py +++ b/tests/components/proximity/test_diagnostics.py @@ -72,5 +72,12 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry( hass, hass_client, mock_entry ) == snapshot( - exclude=props("entry_id", "last_changed", "last_reported", "last_updated") + exclude=props( + "entry_id", + "last_changed", + "last_reported", + "last_updated", + "created_at", + "modified_at", + ) ) diff --git a/tests/components/prusalink/test_binary_sensor.py b/tests/components/prusalink/test_binary_sensor.py new file mode 100644 index 00000000000..c39b15471c6 --- /dev/null +++ b/tests/components/prusalink/test_binary_sensor.py @@ -0,0 +1,33 @@ +"""Test Prusalink sensors.""" + +from unittest.mock import PropertyMock, patch + +import pytest + +from homeassistant.const import STATE_OFF, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +@pytest.fixture(autouse=True) +def setup_binary_sensor_platform_only(): + """Only setup sensor platform.""" + with ( + patch("homeassistant.components.prusalink.PLATFORMS", [Platform.BINARY_SENSOR]), + patch( + "homeassistant.helpers.entity.Entity.entity_registry_enabled_default", + PropertyMock(return_value=True), + ), + ): + yield + + +async def test_binary_sensors_no_job( + hass: HomeAssistant, mock_config_entry, mock_api +) -> None: + """Test sensors while no job active.""" + assert await async_setup_component(hass, "prusalink", {}) + + state = hass.states.get("binary_sensor.mock_title_mmu") + assert state is not None + assert state.state == STATE_OFF diff --git a/tests/components/prusalink/test_sensor.py b/tests/components/prusalink/test_sensor.py index b15e9198da6..c0693626600 100644 --- a/tests/components/prusalink/test_sensor.py +++ b/tests/components/prusalink/test_sensor.py @@ -101,6 +101,10 @@ async def test_sensors_no_job(hass: HomeAssistant, mock_config_entry, mock_api) assert state is not None assert state.state == "PLA" + state = hass.states.get("sensor.mock_title_nozzle_diameter") + assert state is not None + assert state.state == "0.4" + state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" @@ -205,6 +209,10 @@ async def test_sensors_idle_job_mk3( assert state is not None assert state.state == "PLA" + state = hass.states.get("sensor.mock_title_nozzle_diameter") + assert state is not None + assert state.state == "0.4" + state = hass.states.get("sensor.mock_title_print_flow") assert state is not None assert state.state == "100" diff --git a/tests/components/ps4/conftest.py b/tests/components/ps4/conftest.py index bc84ea3b4db..c95cc78f53a 100644 --- a/tests/components/ps4/conftest.py +++ b/tests/components/ps4/conftest.py @@ -1,10 +1,10 @@ """Test configuration for PS4.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch from pyps4_2ndscreen.ddp import DEFAULT_UDP_PORT, DDPProtocol import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/pure_energie/conftest.py b/tests/components/pure_energie/conftest.py index 7174befbf5b..9aa3a4cc1b4 100644 --- a/tests/components/pure_energie/conftest.py +++ b/tests/components/pure_energie/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Pure Energie integration tests.""" +from collections.abc import Generator import 
json from unittest.mock import AsyncMock, MagicMock, patch from gridnet import Device as GridNetDevice, SmartBridge import pytest -from typing_extensions import Generator from homeassistant.components.pure_energie.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/purpleair/test_diagnostics.py b/tests/components/purpleair/test_diagnostics.py index 13dcd1338e0..599549bb723 100644 --- a/tests/components/purpleair/test_diagnostics.py +++ b/tests/components/purpleair/test_diagnostics.py @@ -3,6 +3,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -34,6 +35,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": { "fields": [ diff --git a/tests/components/pushover/test_init.py b/tests/components/pushover/test_init.py index c3a653042ce..85266e34d13 100644 --- a/tests/components/pushover/test_init.py +++ b/tests/components/pushover/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import MagicMock, patch from pushover_complete import BadAPIRequestError import pytest import requests_mock +from urllib3.exceptions import MaxRetryError from homeassistant.components.pushover.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -93,3 +94,18 @@ async def test_async_setup_entry_failed_json_error( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_async_setup_entry_failed_urrlib3_error( + hass: HomeAssistant, mock_pushover: MagicMock +) -> None: + """Test pushover failed setup due to conn error.""" + entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_CONFIG, + ) + entry.add_to_hass(hass) + mock_pushover.side_effect = MaxRetryError(MagicMock(), MagicMock()) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/pvoutput/conftest.py b/tests/components/pvoutput/conftest.py index d19f09d9e6c..a55bb21d2ae 100644 --- a/tests/components/pvoutput/conftest.py +++ b/tests/components/pvoutput/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from pvo import Status, System import pytest -from typing_extensions import Generator from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/pyload/conftest.py b/tests/components/pyload/conftest.py index 1d7b11567c7..c0f181396ab 100644 --- a/tests/components/pyload/conftest.py +++ b/tests/components/pyload/conftest.py @@ -1,7 +1,7 @@ """Fixtures for pyLoad integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from pyloadapi.types import LoginResponse, StatusServerResponse import pytest @@ -72,7 +72,7 @@ def pyload_config() -> ConfigType: @pytest.fixture -def mock_pyloadapi() -> Generator[AsyncMock, None, None]: +def mock_pyloadapi() -> Generator[MagicMock]: """Mock PyLoadAPI.""" with ( patch( diff --git a/tests/components/pyload/test_button.py 
b/tests/components/pyload/test_button.py index b5aa18ad3d9..9a2f480bede 100644 --- a/tests/components/pyload/test_button.py +++ b/tests/components/pyload/test_button.py @@ -1,8 +1,9 @@ """The tests for the button component.""" -from collections.abc import AsyncGenerator +from collections.abc import Generator from unittest.mock import AsyncMock, call, patch +from pyloadapi import CannotConnect, InvalidAuth import pytest from syrupy.assertion import SnapshotAssertion @@ -11,6 +12,7 @@ from homeassistant.components.pyload.button import PyLoadButtonEntity from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -24,7 +26,7 @@ API_CALL = { @pytest.fixture(autouse=True) -async def button_only() -> AsyncGenerator[None, None]: +def button_only() -> Generator[None]: """Enable only the button platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", @@ -78,6 +80,43 @@ async def test_button_press( {ATTR_ENTITY_ID: entity_entry.entity_id}, blocking=True, ) - await hass.async_block_till_done() assert API_CALL[entity_entry.translation_key] in mock_pyloadapi.method_calls mock_pyloadapi.reset_mock() + + +@pytest.mark.parametrize( + ("side_effect"), + [CannotConnect, InvalidAuth], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button_press_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + entity_registry: er.EntityRegistry, + side_effect: Exception, +) -> None: + """Test button press method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + mock_pyloadapi.stop_all_downloads.side_effect = side_effect + mock_pyloadapi.restart_failed.side_effect = side_effect + mock_pyloadapi.delete_finished.side_effect = side_effect + mock_pyloadapi.restart.side_effect = side_effect + + for entity_entry in entity_entries: + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) diff --git a/tests/components/pyload/test_sensor.py b/tests/components/pyload/test_sensor.py index 3e18faca12b..8c194a111ea 100644 --- a/tests/components/pyload/test_sensor.py +++ b/tests/components/pyload/test_sensor.py @@ -1,6 +1,6 @@ """Tests for the pyLoad Sensors.""" -from collections.abc import AsyncGenerator +from collections.abc import Generator from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory @@ -22,7 +22,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None, None]: +def sensor_only() -> Generator[None]: """Enable only the sensor platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", diff --git a/tests/components/pyload/test_switch.py b/tests/components/pyload/test_switch.py index 42a6bfa6f14..493dbd8c0da 100644 --- a/tests/components/pyload/test_switch.py +++ b/tests/components/pyload/test_switch.py @@ -1,8 +1,9 @@ """Tests for the pyLoad 
Switches.""" -from collections.abc import AsyncGenerator +from collections.abc import Generator from unittest.mock import AsyncMock, call, patch +from pyloadapi import CannotConnect, InvalidAuth import pytest from syrupy.assertion import SnapshotAssertion @@ -16,6 +17,7 @@ from homeassistant.components.switch import ( from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -36,7 +38,7 @@ API_CALL = { @pytest.fixture(autouse=True) -async def switch_only() -> AsyncGenerator[None, None]: +def switch_only() -> Generator[None]: """Enable only the switch platform.""" with patch( "homeassistant.components.pyload.PLATFORMS", @@ -102,3 +104,49 @@ async def test_turn_on_off( in mock_pyloadapi.method_calls ) mock_pyloadapi.reset_mock() + + +@pytest.mark.parametrize( + ("service_call"), + [ + SERVICE_TURN_ON, + SERVICE_TURN_OFF, + SERVICE_TOGGLE, + ], +) +@pytest.mark.parametrize( + ("side_effect"), + [CannotConnect, InvalidAuth], +) +async def test_turn_on_off_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, + service_call: str, + entity_registry: er.EntityRegistry, + side_effect: Exception, +) -> None: + """Test switch turn on/off, toggle method.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + mock_pyloadapi.unpause.side_effect = side_effect + mock_pyloadapi.pause.side_effect = side_effect + mock_pyloadapi.toggle_pause.side_effect = side_effect + mock_pyloadapi.toggle_reconnect.side_effect = side_effect + + for entity_entry in entity_entries: + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SWITCH_DOMAIN, + service_call, + {ATTR_ENTITY_ID: entity_entry.entity_id}, + blocking=True, + ) diff --git a/tests/components/python_script/test_init.py b/tests/components/python_script/test_init.py index 03fa73f076e..c4dc00c448a 100644 --- a/tests/components/python_script/test_init.py +++ b/tests/components/python_script/test_init.py @@ -155,7 +155,7 @@ raise Exception('boom') task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == HomeAssistantError + assert type(task.exception()) is HomeAssistantError assert "Error executing script (Exception): boom" in str(task.exception()) @@ -183,7 +183,7 @@ hass.async_stop() task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == ServiceValidationError + assert type(task.exception()) is ServiceValidationError assert "Not allowed to access async methods" in str(task.exception()) @@ -233,7 +233,7 @@ async def test_accessing_forbidden_methods_with_response(hass: HomeAssistant) -> task = hass.async_add_executor_job(execute, hass, "test.py", source, {}, True) await hass.async_block_till_done(wait_background_tasks=True) - assert type(task.exception()) == ServiceValidationError + assert type(task.exception()) is ServiceValidationError assert f"Not 
allowed to access {name}" in str(task.exception()) diff --git a/tests/components/qbittorrent/conftest.py b/tests/components/qbittorrent/conftest.py index b15e2a6865b..17fb8e15b47 100644 --- a/tests/components/qbittorrent/conftest.py +++ b/tests/components/qbittorrent/conftest.py @@ -1,10 +1,10 @@ """Fixtures for testing qBittorrent component.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest import requests_mock -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/qnap/conftest.py b/tests/components/qnap/conftest.py index c0947318f60..2625f1805b6 100644 --- a/tests/components/qnap/conftest.py +++ b/tests/components/qnap/conftest.py @@ -1,9 +1,9 @@ """Setup the QNAP tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator TEST_HOST = "1.2.3.4" TEST_USERNAME = "admin" diff --git a/tests/components/qnap_qsw/test_diagnostics.py b/tests/components/qnap_qsw/test_diagnostics.py index 8bca9d8d989..ccaac458b12 100644 --- a/tests/components/qnap_qsw/test_diagnostics.py +++ b/tests/components/qnap_qsw/test_diagnostics.py @@ -25,7 +25,7 @@ from aioqsw.const import ( QSD_SYSTEM_TIME, QSD_TEMP, QSD_TEMP_MAX, - QSD_UPTIME, + QSD_UPTIME_SECONDS, QSD_VERSION, ) @@ -118,6 +118,6 @@ async def test_config_entry_diagnostics( assert ( sys_time_diag.items() >= { - QSD_UPTIME: sys_time_mock[API_UPTIME], + QSD_UPTIME_SECONDS: sys_time_mock[API_UPTIME], }.items() ) diff --git a/tests/components/rabbitair/test_config_flow.py b/tests/components/rabbitair/test_config_flow.py index 2e0cfba38c0..7f9479339a5 100644 --- a/tests/components/rabbitair/test_config_flow.py +++ b/tests/components/rabbitair/test_config_flow.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from ipaddress import ip_address from unittest.mock import MagicMock, Mock, patch import pytest from rabbitair import Mode, Model, Speed -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components import zeroconf diff --git a/tests/components/radio_browser/conftest.py b/tests/components/radio_browser/conftest.py index 95fda545a6c..fc666b32c53 100644 --- a/tests/components/radio_browser/conftest.py +++ b/tests/components/radio_browser/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.radio_browser.const import DOMAIN diff --git a/tests/components/rainbird/conftest.py b/tests/components/rainbird/conftest.py index a2c26c71231..b0411d9d313 100644 --- a/tests/components/rainbird/conftest.py +++ b/tests/components/rainbird/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus import json from typing import Any @@ -9,7 +10,6 @@ from unittest.mock import patch from pyrainbird import encryption import pytest -from typing_extensions import Generator from homeassistant.components.rainbird import DOMAIN from homeassistant.components.rainbird.const import ( diff --git a/tests/components/rainbird/test_config_flow.py b/tests/components/rainbird/test_config_flow.py index cdcef95f458..87506ad656c 100644 --- a/tests/components/rainbird/test_config_flow.py +++ b/tests/components/rainbird/test_config_flow.py @@ -1,11 +1,11 @@ """Tests for the Rain 
Bird config flow.""" +from collections.abc import AsyncGenerator from http import HTTPStatus from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import AsyncGenerator from homeassistant import config_entries from homeassistant.components.rainbird import DOMAIN @@ -40,7 +40,7 @@ def mock_responses() -> list[AiohttpClientMockResponse]: @pytest.fixture(autouse=True) -async def config_entry_data() -> None: +async def config_entry_data() -> dict[str, Any] | None: """Fixture to disable config entry setup for exercising config flow.""" return None diff --git a/tests/components/rainforest_raven/conftest.py b/tests/components/rainforest_raven/conftest.py index 0a809c6430a..35ce4443032 100644 --- a/tests/components/rainforest_raven/conftest.py +++ b/tests/components/rainforest_raven/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Rainforest RAVEn tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant diff --git a/tests/components/rainforest_raven/test_config_flow.py b/tests/components/rainforest_raven/test_config_flow.py index 7f7041cbcd8..da7e65882a4 100644 --- a/tests/components/rainforest_raven/test_config_flow.py +++ b/tests/components/rainforest_raven/test_config_flow.py @@ -1,11 +1,11 @@ """Test Rainforest RAVEn config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from aioraven.device import RAVEnConnectionError import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from homeassistant.components.rainforest_raven.const import DOMAIN from homeassistant.config_entries import SOURCE_USB, SOURCE_USER diff --git a/tests/components/rainmachine/test_diagnostics.py b/tests/components/rainmachine/test_diagnostics.py index 1fc03ab357a..ad5743957dd 100644 --- a/tests/components/rainmachine/test_diagnostics.py +++ b/tests/components/rainmachine/test_diagnostics.py @@ -2,6 +2,7 @@ from regenmaschine.errors import RainMachineError from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -17,10 +18,9 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) async def test_entry_diagnostics_failed_controller_diagnostics( @@ -33,7 +33,6 @@ async def test_entry_diagnostics_failed_controller_diagnostics( ) -> None: """Test config entry diagnostics when the controller diagnostics API call fails.""" controller.diagnostics.current.side_effect = RainMachineError - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/rdw/conftest.py b/tests/components/rdw/conftest.py index 3f45f44e3d8..71c73a55441 100644 --- a/tests/components/rdw/conftest.py +++ b/tests/components/rdw/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions 
import Generator from vehicle import Vehicle from homeassistant.components.rdw.const import CONF_LICENSE_PLATE, DOMAIN diff --git a/tests/components/recollect_waste/test_diagnostics.py b/tests/components/recollect_waste/test_diagnostics.py index 6c8549786e8..2b92892b1d1 100644 --- a/tests/components/recollect_waste/test_diagnostics.py +++ b/tests/components/recollect_waste/test_diagnostics.py @@ -5,6 +5,7 @@ from homeassistant.core import HomeAssistant from .conftest import TEST_SERVICE_ID +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -30,6 +31,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "data": [ { diff --git a/tests/components/recorder/auto_repairs/events/test_schema.py b/tests/components/recorder/auto_repairs/events/test_schema.py index e3b2638eded..cae181a6270 100644 --- a/tests/components/recorder/auto_repairs/events/test_schema.py +++ b/tests/components/recorder/auto_repairs/events/test_schema.py @@ -11,11 +11,18 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,8 +40,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -50,8 +57,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_event_data( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -66,8 +73,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data( return_value={"event_data.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -83,8 +90,8 @@ async def test_validate_db_schema_fix_utf8_issue_event_data( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -99,8 +106,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"events.utf8mb4_unicode_ci"}, ), ): - await 
async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/states/test_schema.py b/tests/components/recorder/auto_repairs/states/test_schema.py index 58910a4441a..915ac1f3500 100644 --- a/tests/components/recorder/auto_repairs/states/test_schema.py +++ b/tests/components/recorder/auto_repairs/states/test_schema.py @@ -11,11 +11,18 @@ from ...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,8 +40,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -52,8 +59,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -68,8 +75,8 @@ async def test_validate_db_schema_fix_utf8_issue_states( return_value={"states.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -84,8 +91,8 @@ async def test_validate_db_schema_fix_utf8_issue_states( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_utf8_issue_state_attributes( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -100,8 +107,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes( return_value={"state_attributes.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -117,8 +124,8 @@ async def test_validate_db_schema_fix_utf8_issue_state_attributes( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: 
RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -133,8 +140,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"states.utf8mb4_unicode_ci"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py index 175cb6ecd1a..a2cf41578c7 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py +++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py @@ -1,7 +1,6 @@ """Test removing statistics duplicates.""" import importlib -from pathlib import Path import sys from unittest.mock import patch @@ -10,17 +9,14 @@ from sqlalchemy import create_engine from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import Recorder, statistics +from homeassistant.components.recorder import statistics from homeassistant.components.recorder.auto_repairs.statistics.duplicates import ( delete_statistics_duplicates, delete_statistics_meta_duplicates, ) -from homeassistant.components.recorder.const import SQLITE_URL_PREFIX from homeassistant.components.recorder.statistics import async_add_external_statistics from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from ...common import async_wait_recording_done @@ -31,20 +27,15 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" -@pytest.fixture -def setup_recorder(recorder_mock: Recorder) -> None: - """Set up recorder.""" - - +@pytest.mark.usefixtures("recorder_mock") async def test_delete_duplicates_no_duplicates( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - setup_recorder: None, ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) @@ -56,10 +47,10 @@ async def test_delete_duplicates_no_duplicates( assert "Found duplicated" not in caplog.text +@pytest.mark.usefixtures("recorder_mock") async def test_duplicate_statistics_handle_integrity_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - setup_recorder: None, ) -> None: """Test the recorder does not blow up if statistics is duplicated.""" await async_wait_recording_done(hass) @@ -140,15 +131,13 @@ def _create_engine_28(*args, **kwargs): return engine +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - module = "tests.components.recorder.db_schema_28" 
importlib.import_module(module) old_db_schema = sys.modules[module] @@ -205,11 +194,10 @@ async def test_delete_metadata_duplicates( new=_create_engine_28, ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -228,9 +216,10 @@ async def test_delete_metadata_duplicates( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -247,15 +236,13 @@ async def test_delete_metadata_duplicates( await hass.async_stop() +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_delete_metadata_duplicates_many( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - module = "tests.components.recorder.db_schema_28" importlib.import_module(module) old_db_schema = sys.modules[module] @@ -324,11 +311,10 @@ async def test_delete_metadata_duplicates_many( new=_create_engine_28, ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -338,9 +324,10 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() # Test that the duplicates are removed during migration from schema 28 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass), + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -359,8 +346,9 @@ async def test_delete_metadata_duplicates_many( await hass.async_stop() +@pytest.mark.usefixtures("recorder_mock") async def test_delete_metadata_duplicates_no_duplicates( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, setup_recorder: None + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics.""" await async_wait_recording_done(hass) diff --git a/tests/components/recorder/auto_repairs/statistics/test_schema.py b/tests/components/recorder/auto_repairs/statistics/test_schema.py index f4e1d74aadf..34a075afbc7 100644 --- a/tests/components/recorder/auto_repairs/statistics/test_schema.py +++ b/tests/components/recorder/auto_repairs/statistics/test_schema.py @@ -11,11 +11,18 @@ from 
...common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("db_engine", ["mysql"]) @pytest.mark.parametrize("enable_schema_validation", [True]) async def test_validate_db_schema_fix_utf8_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -30,8 +37,8 @@ async def test_validate_db_schema_fix_utf8_issue( return_value={"statistics_meta.4-byte UTF-8"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -48,8 +55,8 @@ async def test_validate_db_schema_fix_utf8_issue( @pytest.mark.parametrize("table", ["statistics_short_term", "statistics"]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema_fix_float_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, table: str, db_engine: str, @@ -68,8 +75,8 @@ async def test_validate_db_schema_fix_float_issue( "homeassistant.components.recorder.migration._modify_columns" ) as modify_columns_mock, ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( @@ -92,8 +99,8 @@ async def test_validate_db_schema_fix_float_issue( @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql"]) async def test_validate_db_schema_fix_collation_issue( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_dialect_name: None, db_engine: str, @@ -108,8 +115,8 @@ async def test_validate_db_schema_fix_collation_issue( return_value={"statistics.utf8mb4_unicode_ci"}, ), ): - await async_setup_recorder_instance(hass) - await async_wait_recording_done(hass) + async with async_test_recorder(hass): + await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert ( diff --git a/tests/components/recorder/auto_repairs/test_schema.py b/tests/components/recorder/auto_repairs/test_schema.py index d921c0cdbf8..857c0f6572f 100644 --- a/tests/components/recorder/auto_repairs/test_schema.py +++ b/tests/components/recorder/auto_repairs/test_schema.py @@ -3,6 +3,7 @@ import pytest from sqlalchemy import text +from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.auto_repairs.schema import ( correct_db_schema_precision, correct_db_schema_utf8, @@ -12,7 +13,7 @@ from homeassistant.components.recorder.auto_repairs.schema import ( ) from homeassistant.components.recorder.db_schema import States from homeassistant.components.recorder.migration import _modify_columns -from homeassistant.components.recorder.util import get_instance, session_scope +from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant from ..common import 
async_wait_recording_done @@ -20,11 +21,18 @@ from ..common import async_wait_recording_done from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.mark.parametrize("enable_schema_validation", [True]) @pytest.mark.parametrize("db_engine", ["mysql", "postgresql"]) async def test_validate_db_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, db_engine: str, recorder_dialect_name: None, @@ -33,46 +41,37 @@ async def test_validate_db_schema( Note: The test uses SQLite, the purpose is only to exercise the code. """ - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) assert "Schema validation failed" not in caplog.text assert "Detected statistics schema errors" not in caplog.text assert "Database is about to correct DB schema errors" not in caplog.text +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_good_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is correct.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, (States.state,) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and repairing it.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -84,38 +83,34 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema( ) ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, (States.state,) + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, (States.state,) ) assert schema_errors == {"states.4-byte UTF-8"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_utf8, instance, States, schema_errors + await recorder_mock.async_add_executor_job( + correct_db_schema_utf8, recorder_mock, States, schema_errors 
) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, ("state",) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, ("state",) ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_incorrect_collation( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the collation is incorrect.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -126,59 +121,51 @@ async def test_validate_db_schema_fix_incorrect_collation( ) ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_has_correct_collation, instance, States + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_has_correct_collation, recorder_mock, States ) assert schema_errors == {"states.utf8mb4_unicode_ci"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_utf8, instance, States, schema_errors + await recorder_mock.async_add_executor_job( + correct_db_schema_utf8, recorder_mock, States, schema_errors ) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( - validate_table_schema_has_correct_collation, instance, States + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_has_correct_collation, recorder_mock, States ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_correct_collation( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is correct with the correct collation.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_table_schema_has_correct_collation, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema with MySQL when the schema is broken and cannot be repaired.""" - if not 
recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): with session_scope(session=session_maker()) as session: @@ -189,63 +176,55 @@ async def test_validate_db_schema_fix_utf8_issue_with_broken_schema_unrepairable "LOCK=EXCLUSIVE;" ) ) - _modify_columns( - session_maker, - instance.engine, - "states", - [ - "entity_id VARCHAR(255) NOT NULL", - ], - ) + _modify_columns( + session_maker, + recorder_mock.engine, + "states", + [ + "entity_id VARCHAR(255) NOT NULL", + ], + ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( - validate_table_schema_supports_utf8, instance, States, ("state",) + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( + validate_table_schema_supports_utf8, recorder_mock, States, ("state",) ) assert schema_errors == set() assert "Error when validating DB schema" in caplog.text +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_good_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is correct.""" - if not recorder_db_url.startswith(("mysql://", "postgresql://")): - # This problem only happens on MySQL and PostgreSQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is broken and than repair it.""" - if not recorder_db_url.startswith(("mysql://", "postgresql://")): - # This problem only happens on MySQL and PostgreSQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): _modify_columns( session_maker, - instance.engine, + recorder_mock.engine, "states", [ "last_updated_ts FLOAT(4)", @@ -253,47 +232,44 @@ async def test_validate_db_schema_precision_with_broken_schema( ], ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == {"states.double precision"} # Now repair the schema - await instance.async_add_executor_job( - correct_db_schema_precision, instance, States, schema_errors + await 
recorder_mock.async_add_executor_job( + correct_db_schema_precision, recorder_mock, States, schema_errors ) # Now validate the schema again - schema_errors = await instance.async_add_executor_job( + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert schema_errors == set() +@pytest.mark.skip_on_db_engine(["postgresql", "sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_validate_db_schema_precision_with_unrepairable_broken_schema( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, recorder_db_url: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test validating DB schema when the schema is broken and cannot be repaired.""" - if not recorder_db_url.startswith("mysql://"): - # This problem only happens on MySQL - return - await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) - instance = get_instance(hass) - session_maker = instance.get_session + session_maker = recorder_mock.get_session def _break_states_schema(): _modify_columns( session_maker, - instance.engine, + recorder_mock.engine, "states", [ "state VARCHAR(255) NOT NULL", @@ -302,10 +278,10 @@ async def test_validate_db_schema_precision_with_unrepairable_broken_schema( ], ) - await instance.async_add_executor_job(_break_states_schema) - schema_errors = await instance.async_add_executor_job( + await recorder_mock.async_add_executor_job(_break_states_schema) + schema_errors = await recorder_mock.async_add_executor_job( validate_db_schema_precision, - instance, + recorder_mock, States, ) assert "Error when validating DB schema" in caplog.text diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index c72b1ac830b..aee35fceb80 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -79,10 +79,18 @@ async def async_block_recorder(hass: HomeAssistant, seconds: float) -> None: await event.wait() +def get_start_time(start: datetime) -> datetime: + """Calculate a valid start time for statistics.""" + start_minutes = start.minute - start.minute % 5 + return start.replace(minute=start_minutes, second=0, microsecond=0) + + def do_adhoc_statistics(hass: HomeAssistant, **kwargs: Any) -> None: """Trigger an adhoc statistics run.""" if not (start := kwargs.get("start")): start = statistics.get_start_time() + elif (start.minute % 5) != 0 or start.second != 0 or start.microsecond != 0: + raise ValueError(f"Statistics must start on 5 minute boundary got {start}") get_instance(hass).queue_task(StatisticsTask(start, False)) @@ -291,11 +299,11 @@ def record_states(hass): wait_recording_done(hass) return hass.states.get(entity_id) - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=15 * 5) + four = three + timedelta(seconds=14 * 5) states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} with freeze_time(one) as freezer: @@ -416,6 +424,14 @@ def get_schema_module_path(schema_version_postfix: str) -> str: return f"tests.components.recorder.db_schema_{schema_version_postfix}" +@dataclass(slots=True) +class MockMigrationTask(migration.MigrationTask): + """Mock migration task which does nothing.""" + + def run(self, instance: Recorder) -> None: + """Run migration task.""" + + @contextmanager def old_db_schema(schema_version_postfix: 
str) -> Iterator[None]: """Fixture to initialize the db with the old schema.""" @@ -434,7 +450,7 @@ def old_db_schema(schema_version_postfix: str) -> Iterator[None]: patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch( CREATE_ENGINE_TARGET, new=partial( diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index 4db573fa65f..f562ba163ba 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -1,14 +1,43 @@ """Fixtures for the recorder component tests.""" -from unittest.mock import patch +from collections.abc import AsyncGenerator, Generator +from dataclasses import dataclass +from functools import partial +import threading +from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components import recorder +from homeassistant.components.recorder import db_schema +from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant +def pytest_configure(config): + """Add custom skip_on_db_engine marker.""" + config.addinivalue_line( + "markers", + "skip_on_db_engine(engine): mark test to run only on named DB engine(s)", + ) + + +@pytest.fixture +def skip_by_db_engine(request: pytest.FixtureRequest, recorder_db_url: str) -> None: + """Fixture to skip tests on unsupported DB engines. + + Mark the test with @pytest.mark.skip_on_db_engine("mysql") to skip on mysql, or + @pytest.mark.skip_on_db_engine(["mysql", "sqlite"]) to skip on mysql and sqlite. + """ + if request.node.get_closest_marker("skip_on_db_engine"): + skip_on_db_engine = request.node.get_closest_marker("skip_on_db_engine").args[0] + if isinstance(skip_on_db_engine, str): + skip_on_db_engine = [skip_on_db_engine] + db_engine = recorder_db_url.partition("://")[0] + if db_engine in skip_on_db_engine: + pytest.skip(f"skipped for DB engine: {db_engine}") + + @pytest.fixture def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None]: """Patch the recorder dialect.""" @@ -22,3 +51,76 @@ def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None "homeassistant.components.recorder.Recorder.dialect_name", db_engine ): yield + + +@dataclass(slots=True) +class InstrumentedMigration: + """Container to aid controlling migration progress.""" + + migration_done: threading.Event + migration_stall: threading.Event + migration_started: threading.Event + migration_version: int | None + apply_update_mock: Mock + + +@pytest.fixture +async def instrument_migration( + hass: HomeAssistant, +) -> AsyncGenerator[InstrumentedMigration]: + """Instrument recorder migration.""" + + real_migrate_schema_live = recorder.migration.migrate_schema_live + real_migrate_schema_non_live = recorder.migration.migrate_schema_non_live + real_apply_update = recorder.migration._apply_update + + def _instrument_migrate_schema(real_func, *args): + """Control migration progress and check results.""" + instrumented_migration.migration_started.set() + + try: + migration_result = real_func(*args) + except Exception: + instrumented_migration.migration_done.set() + raise + + # Check and report the outcome of the migration; if migration fails + # the recorder will silently create a new database. 
+ with session_scope(hass=hass, read_only=True) as session: + res = ( + session.query(db_schema.SchemaChanges) + .order_by(db_schema.SchemaChanges.change_id.desc()) + .first() + ) + instrumented_migration.migration_version = res.schema_version + instrumented_migration.migration_done.set() + return migration_result + + def _instrument_apply_update(*args): + """Control migration progress.""" + instrumented_migration.migration_stall.wait() + real_apply_update(*args) + + with ( + patch( + "homeassistant.components.recorder.migration.migrate_schema_live", + wraps=partial(_instrument_migrate_schema, real_migrate_schema_live), + ), + patch( + "homeassistant.components.recorder.migration.migrate_schema_non_live", + wraps=partial(_instrument_migrate_schema, real_migrate_schema_non_live), + ), + patch( + "homeassistant.components.recorder.migration._apply_update", + wraps=_instrument_apply_update, + ) as apply_update_mock, + ): + instrumented_migration = InstrumentedMigration( + migration_done=threading.Event(), + migration_stall=threading.Event(), + migration_started=threading.Event(), + migration_version=None, + apply_update_mock=apply_update_mock, + ) + + yield instrumented_migration diff --git a/tests/components/recorder/db_schema_30.py b/tests/components/recorder/db_schema_30.py index b82213cbc89..2668f610dfd 100644 --- a/tests/components/recorder/db_schema_30.py +++ b/tests/components/recorder/db_schema_30.py @@ -33,6 +33,7 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -109,7 +110,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 15b56e2fc86..60f4f733ec0 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -33,6 +33,7 @@ from sqlalchemy import ( type_coerce, ) from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect from sqlalchemy.orm import aliased, declarative_base, relationship from sqlalchemy.orm.session import Session @@ -109,7 +110,7 @@ STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): # type: ignore[misc] """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) diff --git a/tests/components/recorder/db_schema_42.py b/tests/components/recorder/db_schema_42.py index c0dfc70571d..99bdbb28f2c 100644 --- a/tests/components/recorder/db_schema_42.py +++ b/tests/components/recorder/db_schema_42.py @@ -171,7 +171,7 @@ def compile_char_one(type_: TypeDecorator, compiler: 
Any, **kw: Any) -> str: class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """Offload the datetime parsing to ciso8601.""" return lambda value: None if value is None else ciso8601.parse_datetime(value) @@ -179,7 +179,7 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): class NativeLargeBinary(LargeBinary): """A faster version of LargeBinary for engines that support python bytes natively.""" - def result_processor(self, dialect, coltype): # type: ignore[no-untyped-def] + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: """No conversion needed for engines that support native bytes.""" return None diff --git a/tests/components/recorder/db_schema_43.py b/tests/components/recorder/db_schema_43.py new file mode 100644 index 00000000000..26d8ecd6856 --- /dev/null +++ b/tests/components/recorder/db_schema_43.py @@ -0,0 +1,889 @@ +"""Models for SQLAlchemy. + +This file contains the model definitions for schema version 43. +It is used to test the schema migration logic. +""" + +from __future__ import annotations + +from collections.abc import Callable +from datetime import datetime, timedelta +import logging +import time +from typing import Any, Self, cast + +import ciso8601 +from fnv_hash_fast import fnv1a_32 +from sqlalchemy import ( + CHAR, + JSON, + BigInteger, + Boolean, + ColumnElement, + DateTime, + Float, + ForeignKey, + Identity, + Index, + Integer, + LargeBinary, + SmallInteger, + String, + Text, + case, + type_coerce, +) +from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite +from sqlalchemy.engine.interfaces import Dialect +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship +from sqlalchemy.types import TypeDecorator + +from homeassistant.components.recorder.const import ( + ALL_DOMAIN_EXCLUDE_ATTRS, + SupportedDialect, +) +from homeassistant.components.recorder.models import ( + StatisticData, + StatisticDataTimestamp, + StatisticMetaData, + bytes_to_ulid_or_none, + bytes_to_uuid_hex_or_none, + datetime_to_timestamp_or_none, + process_timestamp, + ulid_to_bytes_or_none, + uuid_hex_to_bytes_or_none, +) +from homeassistant.components.sensor import ATTR_STATE_CLASS +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + ATTR_FRIENDLY_NAME, + ATTR_UNIT_OF_MEASUREMENT, + MATCH_ALL, + MAX_LENGTH_EVENT_EVENT_TYPE, + MAX_LENGTH_STATE_ENTITY_ID, + MAX_LENGTH_STATE_STATE, +) +from homeassistant.core import Context, Event, EventOrigin, EventStateChangedData, State +from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null +import homeassistant.util.dt as dt_util +from homeassistant.util.json import ( + JSON_DECODE_EXCEPTIONS, + json_loads, + json_loads_object, +) + + +# SQLAlchemy Schema +class Base(DeclarativeBase): + """Base class for tables.""" + + +SCHEMA_VERSION = 43 + +_LOGGER = logging.getLogger(__name__) + +TABLE_EVENTS = "events" +TABLE_EVENT_DATA = "event_data" +TABLE_EVENT_TYPES = "event_types" +TABLE_STATES = "states" +TABLE_STATE_ATTRIBUTES = "state_attributes" +TABLE_STATES_META = "states_meta" +TABLE_RECORDER_RUNS = "recorder_runs" +TABLE_SCHEMA_CHANGES = "schema_changes" +TABLE_STATISTICS = "statistics" +TABLE_STATISTICS_META = "statistics_meta" +TABLE_STATISTICS_RUNS = "statistics_runs" 
+TABLE_STATISTICS_SHORT_TERM = "statistics_short_term" +TABLE_MIGRATION_CHANGES = "migration_changes" + +STATISTICS_TABLES = ("statistics", "statistics_short_term") + +MAX_STATE_ATTRS_BYTES = 16384 +MAX_EVENT_DATA_BYTES = 32768 + +PSQL_DIALECT = SupportedDialect.POSTGRESQL + +ALL_TABLES = [ + TABLE_STATES, + TABLE_STATE_ATTRIBUTES, + TABLE_EVENTS, + TABLE_EVENT_DATA, + TABLE_EVENT_TYPES, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, + TABLE_MIGRATION_CHANGES, + TABLE_STATES_META, + TABLE_STATISTICS, + TABLE_STATISTICS_META, + TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, +] + +TABLES_TO_CHECK = [ + TABLE_STATES, + TABLE_EVENTS, + TABLE_RECORDER_RUNS, + TABLE_SCHEMA_CHANGES, +] + +LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts" +METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts" +EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin" +STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin" +LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id" +LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts" +CONTEXT_ID_BIN_MAX_LENGTH = 16 + +MYSQL_COLLATE = "utf8mb4_unicode_ci" +MYSQL_DEFAULT_CHARSET = "utf8mb4" +MYSQL_ENGINE = "InnoDB" + +_DEFAULT_TABLE_ARGS = { + "mysql_default_charset": MYSQL_DEFAULT_CHARSET, + "mysql_collate": MYSQL_COLLATE, + "mysql_engine": MYSQL_ENGINE, + "mariadb_default_charset": MYSQL_DEFAULT_CHARSET, + "mariadb_collate": MYSQL_COLLATE, + "mariadb_engine": MYSQL_ENGINE, +} + +_MATCH_ALL_KEEP = { + ATTR_DEVICE_CLASS, + ATTR_STATE_CLASS, + ATTR_UNIT_OF_MEASUREMENT, + ATTR_FRIENDLY_NAME, +} + + +class UnusedDateTime(DateTime): + """An unused column type that behaves like a datetime.""" + + +class Unused(CHAR): + """An unused column type that behaves like a string.""" + + +@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +@compiles(Unused, "mysql", "mariadb", "sqlite") # type: ignore[misc,no-untyped-call] +def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite.""" + return "CHAR(0)" # Uses 1 byte on MySQL (no change on sqlite) + + +@compiles(Unused, "postgresql") # type: ignore[misc,no-untyped-call] +def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str: + """Compile Unused as CHAR(1) on postgresql.""" + return "CHAR(1)" # Uses 1 byte + + +class FAST_PYSQLITE_DATETIME(sqlite.DATETIME): + """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex.""" + + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + """Offload the datetime parsing to ciso8601.""" + return lambda value: None if value is None else ciso8601.parse_datetime(value) + + +class NativeLargeBinary(LargeBinary): + """A faster version of LargeBinary for engines that support python bytes natively.""" + + def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None: + """No conversion needed for engines that support native bytes.""" + return None + + +# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32 +# for sqlite and postgresql we use a bigint +UINT_32_TYPE = BigInteger().with_variant( + mysql.INTEGER(unsigned=True), # type: ignore[no-untyped-call] + "mysql", + "mariadb", +) +JSON_VARIANT_CAST = Text().with_variant( + postgresql.JSON(none_as_null=True), # type: ignore[no-untyped-call] + "postgresql", +) +JSONB_VARIANT_CAST = Text().with_variant( + postgresql.JSONB(none_as_null=True), # type: 
ignore[no-untyped-call] + "postgresql", +) +DATETIME_TYPE = ( + DateTime(timezone=True) + .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite") # type: ignore[no-untyped-call] +) +DOUBLE_TYPE = ( + Float() + .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb") # type: ignore[no-untyped-call] + .with_variant(oracle.DOUBLE_PRECISION(), "oracle") + .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql") +) +UNUSED_LEGACY_COLUMN = Unused(0) +UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True) +UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger() +DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION" +CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant( + NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite" +) + +TIMESTAMP_TYPE = DOUBLE_TYPE + + +class JSONLiteral(JSON): + """Teach SA how to literalize json.""" + + def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]: + """Processor to convert a value to JSON.""" + + def process(value: Any) -> str: + """Dump json.""" + return JSON_DUMP(value) + + return process + + +EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote] + + +class Events(Base): + """Event history data.""" + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index( + "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts" + ), + Index( + EVENTS_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_EVENTS + event_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column(SmallInteger) + time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + data_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_data.data_id"), index=True + ) + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + event_type_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("event_types.event_type_id") + ) + event_data_rel: Mapped[EventData | None] = relationship("EventData") + event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes") + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @property + def _time_fired_isotime(self) -> str | None: + """Return time_fired as an isotime string.""" + date_time: datetime | None + if self.time_fired_ts is not None: + date_time = dt_util.utc_from_timestamp(self.time_fired_ts) + else: + date_time = process_timestamp(self.time_fired) + if date_time is None: + return None + return date_time.isoformat(sep=" ", timespec="seconds") + + 
@staticmethod + def from_event(event: Event) -> Events: + """Create an event database object from a native event.""" + context = event.context + return Events( + event_type=None, + event_data=None, + origin_idx=event.origin.idx, + time_fired=None, + time_fired_ts=event.time_fired_timestamp, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), + ) + + def to_native(self, validate_entity_id: bool = True) -> Event | None: + """Convert to a native HA Event.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + return Event( + self.event_type or "", + json_loads_object(self.event_data) if self.event_data else {}, + EventOrigin(self.origin) + if self.origin + else EVENT_ORIGIN_ORDER[self.origin_idx or 0], + self.time_fired_ts or 0, + context=context, + ) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting to event: %s", self) + return None + + +class EventData(Base): + """Event data history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_DATA + data_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid confusion with the states table + shared_data: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + @staticmethod + def shared_data_bytes_from_event( + event: Event, dialect: SupportedDialect | None + ) -> bytes: + """Create shared_data from an event.""" + if dialect == SupportedDialect.POSTGRESQL: + bytes_result = json_bytes_strip_null(event.data) + else: + bytes_result = json_bytes(event.data) + if len(bytes_result) > MAX_EVENT_DATA_BYTES: + _LOGGER.warning( + "Event data for %s exceed maximum size of %s bytes. 
" + "This can cause database performance issues; Event data " + "will not be stored", + event.event_type, + MAX_EVENT_DATA_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_data_bytes(shared_data_bytes: bytes) -> int: + """Return the hash of json encoded shared data.""" + return fnv1a_32(shared_data_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to an event data dictionary.""" + shared_data = self.shared_data + if shared_data is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_data)) + except JSON_DECODE_EXCEPTIONS: + _LOGGER.exception("Error converting row to event data: %s", self) + return {} + + +class EventTypes(Base): + """Event type history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_EVENT_TYPES + event_type_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + event_type: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class States(Base): + """State change history.""" + + __table_args__ = ( + # Used for fetching the state of entities at a specific time + # (get_states in history.py) + Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"), + Index( + STATES_CONTEXT_ID_BIN_INDEX, + "context_id_bin", + mysql_length=CONTEXT_ID_BIN_MAX_LENGTH, + mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATES + state_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE)) + attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN) + last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_updated_ts: Mapped[float | None] = mapped_column( + TIMESTAMP_TYPE, default=time.time, index=True + ) + old_state_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states.state_id"), index=True + ) + attributes_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("state_attributes.attributes_id"), index=True + ) + context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN) + origin_idx: Mapped[int | None] = mapped_column( + SmallInteger + ) # 0 is local, 1 is remote + old_state: Mapped[States | None] = relationship("States", remote_side=[state_id]) + state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes") + context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE) + metadata_id: Mapped[int | None] = mapped_column( + Integer, ForeignKey("states_meta.metadata_id") + ) + states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta") + + def __repr__(self) 
-> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @property + def _last_updated_isotime(self) -> str | None: + """Return last_updated as an isotime string.""" + date_time: datetime | None + if self.last_updated_ts is not None: + date_time = dt_util.utc_from_timestamp(self.last_updated_ts) + else: + date_time = process_timestamp(self.last_updated) + if date_time is None: + return None + return date_time.isoformat(sep=" ", timespec="seconds") + + @staticmethod + def from_event(event: Event[EventStateChangedData]) -> States: + """Create object from a state_changed event.""" + state = event.data["new_state"] + # None state means the state was removed from the state machine + if state is None: + state_value = "" + last_updated_ts = event.time_fired_timestamp + last_changed_ts = None + last_reported_ts = None + else: + state_value = state.state + last_updated_ts = state.last_updated_timestamp + if state.last_updated == state.last_changed: + last_changed_ts = None + else: + last_changed_ts = state.last_changed_timestamp + if state.last_updated == state.last_reported: + last_reported_ts = None + else: + last_reported_ts = state.last_reported_timestamp + context = event.context + return States( + state=state_value, + entity_id=event.data["entity_id"], + attributes=None, + context_id=None, + context_id_bin=ulid_to_bytes_or_none(context.id), + context_user_id=None, + context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), + context_parent_id=None, + context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), + origin_idx=event.origin.idx, + last_updated=None, + last_changed=None, + last_updated_ts=last_updated_ts, + last_changed_ts=last_changed_ts, + last_reported_ts=last_reported_ts, + ) + + def to_native(self, validate_entity_id: bool = True) -> State | None: + """Convert to an HA state object.""" + context = Context( + id=bytes_to_ulid_or_none(self.context_id_bin), + user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin), + parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin), + ) + try: + attrs = json_loads_object(self.attributes) if self.attributes else {} + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state: %s", self) + return None + last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts: + last_changed = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + else: + last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0) + if ( + self.last_reported_ts is None + or self.last_reported_ts == self.last_updated_ts + ): + last_reported = dt_util.utc_from_timestamp(self.last_updated_ts or 0) + else: + last_reported = dt_util.utc_from_timestamp(self.last_reported_ts or 0) + return State( + self.entity_id or "", + self.state, # type: ignore[arg-type] + # Join the state_attributes table on attributes_id to get the attributes + # for newer states + attrs, + last_changed=last_changed, + last_reported=last_reported, + last_updated=last_updated, + context=context, + validate_entity_id=validate_entity_id, + ) + + +class StateAttributes(Base): + """State attribute change history.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATE_ATTRIBUTES + attributes_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True) + # Note that this is not named attributes to avoid 
confusion with the states table + shared_attrs: Mapped[str | None] = mapped_column( + Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb") + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + @staticmethod + def shared_attrs_bytes_from_event( + event: Event[EventStateChangedData], + dialect: SupportedDialect | None, + ) -> bytes: + """Create shared_attrs from a state_changed event.""" + # None state means the state was removed from the state machine + if (state := event.data["new_state"]) is None: + return b"{}" + if state_info := state.state_info: + unrecorded_attributes = state_info["unrecorded_attributes"] + exclude_attrs = { + *ALL_DOMAIN_EXCLUDE_ATTRS, + *unrecorded_attributes, + } + if MATCH_ALL in unrecorded_attributes: + # Don't exclude device class, state class, unit of measurement + # or friendly name when using the MATCH_ALL exclude constant + exclude_attrs.update(state.attributes) + exclude_attrs -= _MATCH_ALL_KEEP + else: + exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS + encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes + bytes_result = encoder( + {k: v for k, v in state.attributes.items() if k not in exclude_attrs} + ) + if len(bytes_result) > MAX_STATE_ATTRS_BYTES: + _LOGGER.warning( + "State attributes for %s exceed maximum size of %s bytes. " + "This can cause database performance issues; Attributes " + "will not be stored", + state.entity_id, + MAX_STATE_ATTRS_BYTES, + ) + return b"{}" + return bytes_result + + @staticmethod + def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int: + """Return the hash of json encoded shared attributes.""" + return fnv1a_32(shared_attrs_bytes) + + def to_native(self) -> dict[str, Any]: + """Convert to a state attributes dictionary.""" + shared_attrs = self.shared_attrs + if shared_attrs is None: + return {} + try: + return cast(dict[str, Any], json_loads(shared_attrs)) + except JSON_DECODE_EXCEPTIONS: + # When json_loads fails + _LOGGER.exception("Error converting row to state attributes: %s", self) + return {} + + +class StatesMeta(Base): + """Metadata for states.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATES_META + metadata_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + entity_id: Mapped[str | None] = mapped_column( + String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True + ) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsBase: + """Statistics base class.""" + + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time) + metadata_id: Mapped[int | None] = mapped_column( + Integer, + ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), + ) + start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True) + mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + min: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + max: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN) + last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE) + state: Mapped[float | None] = mapped_column(DOUBLE_TYPE) + sum: Mapped[float | None] = 
mapped_column(DOUBLE_TYPE) + + duration: timedelta + + @classmethod + def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: + """Create object from a statistics with datatime objects.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=dt_util.utc_to_timestamp(stats["start"]), + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + @classmethod + def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self: + """Create object from a statistics with timestamps.""" + return cls( # type: ignore[call-arg] + metadata_id=metadata_id, + created=None, + created_ts=time.time(), + start=None, + start_ts=stats["start_ts"], + mean=stats.get("mean"), + min=stats.get("min"), + max=stats.get("max"), + last_reset=None, + last_reset_ts=stats.get("last_reset_ts"), + state=stats.get("state"), + sum=stats.get("sum"), + ) + + +class Statistics(Base, StatisticsBase): + """Long term statistics.""" + + duration = timedelta(hours=1) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + "ix_statistics_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATISTICS + + +class StatisticsShortTerm(Base, StatisticsBase): + """Short term statistics.""" + + duration = timedelta(minutes=5) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index( + "ix_statistics_short_term_statistic_id_start_ts", + "metadata_id", + "start_ts", + unique=True, + ), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_STATISTICS_SHORT_TERM + + +class StatisticsMeta(Base): + """Statistics meta data.""" + + __table_args__ = (_DEFAULT_TABLE_ARGS,) + __tablename__ = TABLE_STATISTICS_META + id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + statistic_id: Mapped[str | None] = mapped_column( + String(255), index=True, unique=True + ) + source: Mapped[str | None] = mapped_column(String(32)) + unit_of_measurement: Mapped[str | None] = mapped_column(String(255)) + has_mean: Mapped[bool | None] = mapped_column(Boolean) + has_sum: Mapped[bool | None] = mapped_column(Boolean) + name: Mapped[str | None] = mapped_column(String(255)) + + @staticmethod + def from_meta(meta: StatisticMetaData) -> StatisticsMeta: + """Create object from meta data.""" + return StatisticsMeta(**meta) + + +class RecorderRuns(Base): + """Representation of recorder run.""" + + __table_args__ = ( + Index("ix_recorder_runs_start_end", "start", "end"), + _DEFAULT_TABLE_ARGS, + ) + __tablename__ = TABLE_RECORDER_RUNS + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE) + closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False) + created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + end = ( + f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None + ) + return ( + f"" + ) + + def to_native(self, validate_entity_id: bool = True) -> Self: + """Return self, native format is this model.""" + return 
self + + +class MigrationChanges(Base): + """Representation of migration changes.""" + + __tablename__ = TABLE_MIGRATION_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + migration_id: Mapped[str] = mapped_column(String(255), primary_key=True) + version: Mapped[int] = mapped_column(SmallInteger) + + +class SchemaChanges(Base): + """Representation of schema version changes.""" + + __tablename__ = TABLE_SCHEMA_CHANGES + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + change_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + schema_version: Mapped[int | None] = mapped_column(Integer) + changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + "" + ) + + +class StatisticsRuns(Base): + """Representation of statistics run.""" + + __tablename__ = TABLE_STATISTICS_RUNS + __table_args__ = (_DEFAULT_TABLE_ARGS,) + + run_id: Mapped[int] = mapped_column(Integer, Identity(), primary_key=True) + start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True) + + def __repr__(self) -> str: + """Return string representation of instance for debugging.""" + return ( + f"" + ) + + +EVENT_DATA_JSON = type_coerce( + EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) +OLD_FORMAT_EVENT_DATA_JSON = type_coerce( + Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True) +) + +SHARED_ATTRS_JSON = type_coerce( + StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) +OLD_FORMAT_ATTRS_JSON = type_coerce( + States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True) +) + +ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"] +OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"] +DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"] +OLD_STATE = aliased(States, name="old_state") + +SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case( + (StateAttributes.shared_attrs.is_(None), States.attributes), + else_=StateAttributes.shared_attrs, +).label("attributes") +SHARED_DATA_OR_LEGACY_EVENT_DATA = case( + (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data +).label("event_data") diff --git a/tests/components/recorder/db_schema_9.py b/tests/components/recorder/db_schema_9.py new file mode 100644 index 00000000000..f9a8c2d2cad --- /dev/null +++ b/tests/components/recorder/db_schema_9.py @@ -0,0 +1,233 @@ +"""Models for SQLAlchemy. + +This file contains the model definitions for schema version 9, +used by Home Assistant Core 0.119.0. +It is used to test the schema migration logic. 
+""" + +import json +import logging + +from sqlalchemy import ( + Boolean, + Column, + DateTime, + ForeignKey, + Index, + Integer, + String, + Text, + distinct, +) +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship +from sqlalchemy.orm.session import Session + +from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id +from homeassistant.helpers.json import JSONEncoder +import homeassistant.util.dt as dt_util + +# SQLAlchemy Schema +Base = declarative_base() + +SCHEMA_VERSION = 9 + +_LOGGER = logging.getLogger(__name__) + +DB_TIMEZONE = "+00:00" + +TABLE_EVENTS = "events" +TABLE_STATES = "states" +TABLE_RECORDER_RUNS = "recorder_runs" +TABLE_SCHEMA_CHANGES = "schema_changes" + +ALL_TABLES = [TABLE_EVENTS, TABLE_STATES, TABLE_RECORDER_RUNS, TABLE_SCHEMA_CHANGES] + + +class Events(Base): # type: ignore[valid-type,misc] + """Event history data.""" + + __tablename__ = TABLE_EVENTS + event_id = Column(Integer, primary_key=True) + event_type = Column(String(32)) + event_data = Column(Text) + origin = Column(String(32)) + time_fired = Column(DateTime(timezone=True), index=True) + created = Column(DateTime(timezone=True), default=dt_util.utcnow) + context_id = Column(String(36), index=True) + context_user_id = Column(String(36), index=True) + context_parent_id = Column(String(36), index=True) + + __table_args__ = ( + # Used for fetching events at a specific time + # see logbook + Index("ix_events_event_type_time_fired", "event_type", "time_fired"), + ) + + @staticmethod + def from_event(event, event_data=None): + """Create an event database object from a native event.""" + return Events( + event_type=event.event_type, + event_data=event_data or json.dumps(event.data, cls=JSONEncoder), + origin=str(event.origin.value), + time_fired=event.time_fired, + context_id=event.context.id, + context_user_id=event.context.user_id, + context_parent_id=event.context.parent_id, + ) + + def to_native(self, validate_entity_id=True): + """Convert to a natve HA Event.""" + context = Context( + id=self.context_id, + user_id=self.context_user_id, + parent_id=self.context_parent_id, + ) + try: + return Event( + self.event_type, + json.loads(self.event_data), + EventOrigin(self.origin), + process_timestamp(self.time_fired), + context=context, + ) + except ValueError: + # When json.loads fails + _LOGGER.exception("Error converting to event: %s", self) + return None + + +class States(Base): # type: ignore[valid-type,misc] + """State change history.""" + + __tablename__ = TABLE_STATES + state_id = Column(Integer, primary_key=True) + domain = Column(String(64)) + entity_id = Column(String(255)) + state = Column(String(255)) + attributes = Column(Text) + event_id = Column(Integer, ForeignKey("events.event_id"), index=True) + last_changed = Column(DateTime(timezone=True), default=dt_util.utcnow) + last_updated = Column(DateTime(timezone=True), default=dt_util.utcnow, index=True) + created = Column(DateTime(timezone=True), default=dt_util.utcnow) + old_state_id = Column(Integer, ForeignKey("states.state_id")) + event = relationship("Events", uselist=False) + old_state = relationship("States", remote_side=[state_id]) + + __table_args__ = ( + # Used for fetching the state of entities at a specific time + # (get_states in history.py) + Index("ix_states_entity_id_last_updated", "entity_id", "last_updated"), + ) + + @staticmethod + def from_event(event): + """Create object from a state_changed event.""" + entity_id = event.data["entity_id"] + state = 
event.data.get("new_state") + + dbstate = States(entity_id=entity_id) + + # State got deleted + if state is None: + dbstate.state = "" + dbstate.domain = split_entity_id(entity_id)[0] + dbstate.attributes = "{}" + dbstate.last_changed = event.time_fired + dbstate.last_updated = event.time_fired + else: + dbstate.domain = state.domain + dbstate.state = state.state + dbstate.attributes = json.dumps(dict(state.attributes), cls=JSONEncoder) + dbstate.last_changed = state.last_changed + dbstate.last_updated = state.last_updated + + return dbstate + + def to_native(self, validate_entity_id=True): + """Convert to an HA state object.""" + try: + return State( + self.entity_id, + self.state, + json.loads(self.attributes), + process_timestamp(self.last_changed), + process_timestamp(self.last_updated), + # Join the events table on event_id to get the context instead + # as it will always be there for state_changed events + context=Context(id=None), + validate_entity_id=validate_entity_id, + ) + except ValueError: + # When json.loads fails + _LOGGER.exception("Error converting row to state: %s", self) + return None + + +class RecorderRuns(Base): # type: ignore[valid-type,misc] + """Representation of recorder run.""" + + __tablename__ = TABLE_RECORDER_RUNS + run_id = Column(Integer, primary_key=True) + start = Column(DateTime(timezone=True), default=dt_util.utcnow) + end = Column(DateTime(timezone=True)) + closed_incorrect = Column(Boolean, default=False) + created = Column(DateTime(timezone=True), default=dt_util.utcnow) + + __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),) + + def entity_ids(self, point_in_time=None): + """Return the entity ids that existed in this run. + + Specify point_in_time if you want to know which existed at that point + in time inside the run. 
+ """ + session = Session.object_session(self) + + assert session is not None, "RecorderRuns need to be persisted" + + query = session.query(distinct(States.entity_id)).filter( + States.last_updated >= self.start + ) + + if point_in_time is not None: + query = query.filter(States.last_updated < point_in_time) + elif self.end is not None: + query = query.filter(States.last_updated < self.end) + + return [row[0] for row in query] + + def to_native(self, validate_entity_id=True): + """Return self, native format is this model.""" + return self + + +class SchemaChanges(Base): # type: ignore[valid-type,misc] + """Representation of schema version changes.""" + + __tablename__ = TABLE_SCHEMA_CHANGES + change_id = Column(Integer, primary_key=True) + schema_version = Column(Integer) + changed = Column(DateTime(timezone=True), default=dt_util.utcnow) + + +def process_timestamp(ts): + """Process a timestamp into datetime object.""" + if ts is None: + return None + if ts.tzinfo is None: + return ts.replace(tzinfo=dt_util.UTC) + + return dt_util.as_utc(ts) + + +def process_timestamp_to_utc_isoformat(ts): + """Process a timestamp into UTC isotime.""" + if ts is None: + return None + if ts.tzinfo == dt_util.UTC: + return ts.isoformat() + if ts.tzinfo is None: + return f"{ts.isoformat()}{DB_TIMEZONE}" + return ts.astimezone(dt_util.UTC).isoformat() diff --git a/tests/components/recorder/test_entity_registry.py b/tests/components/recorder/test_entity_registry.py index a74992525b1..ad438dcc525 100644 --- a/tests/components/recorder/test_entity_registry.py +++ b/tests/components/recorder/test_entity_registry.py @@ -40,7 +40,7 @@ def _count_entity_id_in_states_meta( @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_filters.py b/tests/components/recorder/test_filters.py index 13a2a325f1e..2841cabda1b 100644 --- a/tests/components/recorder/test_filters.py +++ b/tests/components/recorder/test_filters.py @@ -7,13 +7,8 @@ from homeassistant.components.recorder.filters import ( extract_include_exclude_filter_conf, merge_include_exclude_filters, ) -from homeassistant.helpers.entityfilter import ( - CONF_DOMAINS, - CONF_ENTITIES, - CONF_ENTITY_GLOBS, - CONF_EXCLUDE, - CONF_INCLUDE, -) +from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE +from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS EMPTY_INCLUDE_FILTER = { CONF_INCLUDE: { diff --git a/tests/components/recorder/test_filters_with_entityfilter.py b/tests/components/recorder/test_filters_with_entityfilter.py index 1ee127a9989..97839803619 100644 --- a/tests/components/recorder/test_filters_with_entityfilter.py +++ b/tests/components/recorder/test_filters_with_entityfilter.py @@ -13,14 +13,17 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ATTR_ENTITY_ID, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entityfilter import ( +from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_DOMAINS, CONF_ENTITIES, - CONF_ENTITY_GLOBS, CONF_EXCLUDE, CONF_INCLUDE, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entityfilter import ( + CONF_ENTITY_GLOBS, convert_include_exclude_filter, ) diff --git 
a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py index 9c66d2ee169..d3024df4ed6 100644 --- a/tests/components/recorder/test_filters_with_entityfilter_schema_37.py +++ b/tests/components/recorder/test_filters_with_entityfilter_schema_37.py @@ -1,12 +1,12 @@ """The tests for the recorder filter matching the EntityFilter component.""" +from collections.abc import AsyncGenerator import json from unittest.mock import patch import pytest from sqlalchemy import select from sqlalchemy.engine.row import Row -from typing_extensions import AsyncGenerator from homeassistant.components.recorder import Recorder, get_instance from homeassistant.components.recorder.db_schema import EventData, Events, States @@ -16,14 +16,17 @@ from homeassistant.components.recorder.filters import ( sqlalchemy_filter_from_include_exclude_conf, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.const import ATTR_ENTITY_ID, STATE_ON -from homeassistant.core import HomeAssistant -from homeassistant.helpers.entityfilter import ( +from homeassistant.const import ( + ATTR_ENTITY_ID, CONF_DOMAINS, CONF_ENTITIES, - CONF_ENTITY_GLOBS, CONF_EXCLUDE, CONF_INCLUDE, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entityfilter import ( + CONF_ENTITY_GLOBS, convert_include_exclude_filter, ) diff --git a/tests/components/recorder/test_history.py b/tests/components/recorder/test_history.py index af846353467..3923c72107a 100644 --- a/tests/components/recorder/test_history.py +++ b/tests/components/recorder/test_history.py @@ -47,7 +47,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -891,14 +891,17 @@ def record_states( return zero, four, states +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_state_changes_during_period_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) @@ -957,14 +960,17 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. 
+ """ instance = recorder.get_instance(hass) @@ -1007,14 +1013,17 @@ async def test_get_states_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25_multiple_entities( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) diff --git a/tests/components/recorder/test_history_db_schema_30.py b/tests/components/recorder/test_history_db_schema_30.py index e5e80b0cdb9..0e5f6cf7f79 100644 --- a/tests/components/recorder/test_history_db_schema_30.py +++ b/tests/components/recorder/test_history_db_schema_30.py @@ -33,7 +33,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_32.py b/tests/components/recorder/test_history_db_schema_32.py index 8a3e6a58ab3..3ee6edd8e1e 100644 --- a/tests/components/recorder/test_history_db_schema_32.py +++ b/tests/components/recorder/test_history_db_schema_32.py @@ -33,7 +33,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" diff --git a/tests/components/recorder/test_history_db_schema_42.py b/tests/components/recorder/test_history_db_schema_42.py index 083d4c0930e..5d9444e9cfe 100644 --- a/tests/components/recorder/test_history_db_schema_42.py +++ b/tests/components/recorder/test_history_db_schema_42.py @@ -42,7 +42,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -893,14 +893,17 @@ def record_states( return zero, four, states +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_state_changes_during_period_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. 
+ """ instance = recorder.get_instance(hass) @@ -959,14 +962,17 @@ async def test_state_changes_during_period_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) @@ -1009,14 +1015,17 @@ async def test_get_states_query_during_migration_to_schema_25( assert state.attributes == {"name": "the light"} +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_get_states_query_during_migration_to_schema_25_multiple_entities( hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test we can query data prior to schema 25 and during migration to schema 25.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop table state_attributes - return + """Test we can query data prior to schema 25 and during migration to schema 25. + + This test doesn't run on MySQL / MariaDB / Postgresql; we can't drop the + state_attributes table. + """ instance = recorder.get_instance(hass) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index e1b22b2c245..3cd4c3ab4b6 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -3,9 +3,10 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from datetime import datetime, timedelta -from pathlib import Path import sqlite3 +import sys import threading from typing import Any, cast from unittest.mock import MagicMock, Mock, patch @@ -14,7 +15,6 @@ from freezegun.api import FrozenDateTimeFactory import pytest from sqlalchemy.exc import DatabaseError, OperationalError, SQLAlchemyError from sqlalchemy.pool import QueuePool -from typing_extensions import Generator from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -26,7 +26,6 @@ from homeassistant.components.recorder import ( CONF_DB_URL, CONFIG_SCHEMA, DOMAIN, - SQLITE_URL_PREFIX, Recorder, db_schema, get_instance, @@ -104,7 +103,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -140,19 +139,16 @@ def _default_recorder(hass): ) +@pytest.mark.parametrize("persistent_database", [True]) async def test_shutdown_before_startup_finishes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, ) -> None: - """Test shutdown before recorder starts is clean.""" - if recorder_db_url == "sqlite://": - # On-disk database because this test does not play nice with the - # MutexPool - recorder_db_url = "sqlite:///" + str(tmp_path / 
"pytest.db") + """Test shutdown before recorder starts is clean. + + On-disk database because this test does not play nice with the MutexPool. + """ config = { - recorder.CONF_DB_URL: recorder_db_url, recorder.CONF_COMMIT_INTERVAL: 1, } hass.set_state(CoreState.not_running) @@ -905,16 +901,19 @@ async def test_saving_event_with_oversized_data( hass.bus.async_fire("test_event", event_data) hass.bus.async_fire("test_event_too_big", massive_dict) await async_wait_recording_done(hass) - events = {} with session_scope(hass=hass, read_only=True) as session: - for _, data, event_type in ( - session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) - ): - events[event_type] = data + events = { + event_type: data + for _, data, event_type in ( + session.query( + Events.event_id, EventData.shared_data, EventTypes.event_type + ) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event", "test_event_too_big"])) + ) + } assert "test_event_too_big" in caplog.text @@ -932,18 +931,19 @@ async def test_saving_event_invalid_context_ulid( event_data = {"test_attr": 5, "test_attr_10": "nice"} hass.bus.async_fire("test_event", event_data, context=Context(id="invalid")) await async_wait_recording_done(hass) - events = {} with session_scope(hass=hass, read_only=True) as session: - for _, data, event_type in ( - session.query(Events.event_id, EventData.shared_data, EventTypes.event_type) - .outerjoin(EventData, Events.data_id == EventData.data_id) - .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) - .where(EventTypes.event_type.in_(["test_event"])) - ): - events[event_type] = data - - assert "invalid" in caplog.text + events = { + event_type: data + for _, data, event_type in ( + session.query( + Events.event_id, EventData.shared_data, EventTypes.event_type + ) + .outerjoin(EventData, Events.data_id == EventData.data_id) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .where(EventTypes.event_type.in_(["test_event"])) + ) + } assert len(events) == 1 assert json_loads(events["test_event"]) == event_data @@ -1365,28 +1365,27 @@ async def test_statistics_runs_initiated( @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_statistics", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( - tmp_path: Path, freezer: FrozenDateTimeFactory + async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory ) -> None: """Test missing statistics are compiled on startup.""" now = dt_util.utcnow().replace(minute=0, second=0, microsecond=0) - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_statistic_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(StatisticsRuns)) - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( 
+ async_test_home_assistant() as hass, + async_test_recorder(hass, wait_recorder=False) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1412,7 +1411,10 @@ async def test_compile_missing_statistics( stats_hourly.append(event) freezer.tick(timedelta(hours=1)) - async with async_test_home_assistant() as hass: + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, wait_recorder=False) as instance, + ): hass.bus.async_listen( EVENT_RECORDER_5MIN_STATISTICS_GENERATED, async_5min_stats_updated_listener ) @@ -1421,13 +1423,9 @@ async def test_compile_missing_statistics( async_hourly_stats_updated_listener, ) - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) - await hass.async_start() await async_wait_recording_done(hass) await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) statistics_runs = await instance.async_add_executor_job( get_statistic_runs, hass ) @@ -1627,24 +1625,24 @@ async def test_service_disable_states_not_recording( ) -async def test_service_disable_run_information_recorded(tmp_path: Path) -> None: +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_service_disable_run_information_recorded( + async_test_recorder: RecorderInstanceGenerator, +) -> None: """Test that runs are still recorded when recorder is disabled.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" def get_recorder_runs(hass: HomeAssistant) -> list: with session_scope(hass=hass, read_only=True) as session: return list(session.query(RecorderRuns)) - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 1 assert db_run_info[0].start is not None @@ -1660,13 +1658,13 @@ async def test_service_disable_run_information_recorded(tmp_path: Path) -> None: await async_wait_recording_done(hass) await hass.async_stop() - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}}) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_start() await async_wait_recording_done(hass) - instance = recorder.get_instance(hass) db_run_info = await instance.async_add_executor_job(get_recorder_runs, hass) assert len(db_run_info) == 2 assert db_run_info[0].start is not None @@ -1681,23 +1679,17 @@ class CannotSerializeMe: """A class that the JSONEncoder cannot serialize.""" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("recorder_config", [{CONF_COMMIT_INTERVAL: 0}]) 
async def test_database_corruption_while_running( - hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + recorder_mock: Recorder, + recorder_db_url: str, + caplog: pytest.LogCaptureFixture, ) -> None: """Test we can recover from sqlite3 db corruption.""" - - def _create_tmpdir_for_test_db() -> Path: - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - return test_dir.joinpath("test.db") - - test_db_file = await hass.async_add_executor_job(_create_tmpdir_for_test_db) - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl, CONF_COMMIT_INTERVAL: 0}} - ) await hass.async_block_till_done() caplog.clear() @@ -1718,6 +1710,7 @@ async def test_database_corruption_while_running( side_effect=OperationalError("statement", {}, []), ): await async_wait_recording_done(hass) + test_db_file = recorder_db_url.removeprefix("sqlite:///") await hass.async_add_executor_job(corrupt_db_file, test_db_file) await async_wait_recording_done(hass) @@ -1811,23 +1804,21 @@ async def test_entity_id_filter( assert len(db_events) == idx + 1, data +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_unlock( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, ) -> None: - """Test writing events during lock getting written after unlocking.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # Database locking is only used for SQLite - return + """Test writing events during lock getting written after unlocking. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. + """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1865,26 +1856,23 @@ async def test_database_lock_and_unlock( assert len(db_events) == 1 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # Database locking is only used for SQLite - return pytest.skip("Database locking is only used for SQLite") + """Test writing events during lock leading to overflow the queue causes the database to unlock. - # Use file DB, in memory DB cannot do write locks. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. 
+ """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1898,7 +1886,9 @@ async def test_database_lock_and_overflow( with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() @@ -1931,25 +1921,23 @@ async def test_database_lock_and_overflow( assert start_time.count(":") == 2 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_database_lock_and_overflow_checks_available_memory( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, - recorder_db_url: str, - tmp_path: Path, caplog: pytest.LogCaptureFixture, issue_registry: ir.IssueRegistry, ) -> None: - """Test writing events during lock leading to overflow the queue causes the database to unlock.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - return pytest.skip("Database locking is only used for SQLite") + """Test writing events during lock leading to overflow the queue causes the database to unlock. - # Use file DB, in memory DB cannot do write locks. - if recorder_db_url == "sqlite://": - # Use file DB, in memory DB cannot do write locks. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") + This test is specific for SQLite: Locking is not implemented for other engines. + + Use file DB, in memory DB cannot do write locks. + """ config = { recorder.CONF_COMMIT_INTERVAL: 0, - recorder.CONF_DB_URL: recorder_db_url, } def _get_db_events(): @@ -1960,26 +1948,43 @@ async def test_database_lock_and_overflow_checks_available_memory( ) ) - await async_setup_recorder_instance(hass, config) - await hass.async_block_till_done() + with patch( + "homeassistant.components.recorder.core.QUEUE_CHECK_INTERVAL", + timedelta(seconds=1), + ): + await async_setup_recorder_instance(hass, config) + await hass.async_block_till_done() event_type = "EVENT_TEST" event_types = (event_type,) await async_wait_recording_done(hass) + min_available_memory = 256 * 1024**2 + + out_of_ram = False + + def _get_available_memory(*args: Any, **kwargs: Any) -> int: + nonlocal out_of_ram + return min_available_memory / 2 if out_of_ram else min_available_memory with ( patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 1), + patch.object( + recorder.core, + "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", + min_available_memory, + ), patch.object(recorder.core, "DB_LOCK_QUEUE_CHECK_TIMEOUT", 0.01), patch.object( recorder.core.Recorder, "_available_memory", - return_value=recorder.core.ESTIMATED_QUEUE_ITEM_SIZE * 4, + side_effect=_get_available_memory, ), ): instance = get_instance(hass) - await instance.lock_database() + assert await instance.lock_database() + db_events = await instance.async_add_executor_job(_get_db_events) + assert len(db_events) == 0 # Record up to the extended limit (which takes into account the available memory) for _ in range(2): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -1996,6 +2001,7 @@ async def test_database_lock_and_overflow_checks_available_memory( assert "Database queue backlog reached 
more than" not in caplog.text + out_of_ram = True # Record beyond the extended limit (which takes into account the available memory) for _ in range(20): event_data = {"test_attr": 5, "test_attr_10": "nice"} @@ -2021,13 +2027,15 @@ async def test_database_lock_and_overflow_checks_available_memory( assert start_time.count(":") == 2 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_lock_timeout( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test locking database timeout when recorder stopped.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite: Locking is not implemented for other engines - return + """Test locking database timeout when recorder stopped. + + This test is specific for SQLite: Locking is not implemented for other engines. + """ hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) @@ -2095,16 +2103,18 @@ async def test_database_connection_keep_alive( assert "Sending keepalive" in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_database_connection_keep_alive_disabled_on_sqlite( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Test we do not do keep alive for sqlite.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, keepalive runs on other engines - return + """Test we do not do keep alive for sqlite. + + This test is specific for SQLite, keepalive runs on other engines. + """ instance = await async_setup_recorder_instance(hass) hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED) @@ -2559,7 +2569,13 @@ async def test_clean_shutdown_when_recorder_thread_raises_during_validate_db_sch assert instance.engine is None -async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("func_to_patch", "expected_setup_result"), + [("migrate_schema_non_live", False), ("migrate_schema_live", False)], +) +async def test_clean_shutdown_when_schema_migration_fails( + hass: HomeAssistant, func_to_patch: str, expected_setup_result: bool +) -> None: """Test we still shutdown cleanly when schema migration fails.""" with ( patch.object( @@ -2570,13 +2586,13 @@ async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch.object( migration, - "migrate_schema", + func_to_patch, side_effect=Exception, ), ): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( + setup_result = await async_setup_component( hass, recorder.DOMAIN, { @@ -2587,6 +2603,7 @@ async def test_clean_shutdown_when_schema_migration_fails(hass: HomeAssistant) - } }, ) + assert setup_result == expected_setup_result await hass.async_block_till_done() instance = recorder.get_instance(hass) @@ -2644,7 +2661,6 @@ async def test_commit_before_commits_pending_writes( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, recorder_db_url: str, - tmp_path: Path, ) -> None: """Test commit_before with a non-zero commit interval. 
@@ -2714,3 +2730,20 @@ async def test_all_tables_use_default_table_args(hass: HomeAssistant) -> None: """Test that all tables use the default table args.""" for table in db_schema.Base.metadata.tables.values(): assert table.kwargs.items() >= db_schema._DEFAULT_TABLE_ARGS.items() + + +async def test_empty_entity_id( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the recorder can handle an empty entity_id.""" + await async_setup_recorder_instance( + hass, + { + "exclude": {"domains": "hidden_domain"}, + }, + ) + hass.bus.async_fire("hello", {"entity_id": ""}) + await async_wait_recording_done(hass) + assert "Invalid entity ID" not in caplog.text diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index d5b26eba680..dc99ddefa3b 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -4,8 +4,7 @@ import datetime import importlib import sqlite3 import sys -import threading -from unittest.mock import Mock, PropertyMock, call, patch +from unittest.mock import ANY, Mock, PropertyMock, call, patch import pytest from sqlalchemy import create_engine, text @@ -19,7 +18,6 @@ from sqlalchemy.exc import ( from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool -from homeassistant.bootstrap import async_setup_component from homeassistant.components import persistent_notification as pn, recorder from homeassistant.components.recorder import db_schema, migration from homeassistant.components.recorder.db_schema import ( @@ -34,8 +32,17 @@ from homeassistant.helpers import recorder as recorder_helper import homeassistant.util.dt as dt_util from .common import async_wait_recording_done, create_engine_test +from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed +from tests.typing import RecorderInstanceGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" def _get_native_states(hass, entity_id): @@ -49,12 +56,13 @@ def _get_native_states(hass, entity_id): return states -async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) -> None: +async def test_schema_update_calls( + hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator +) -> None: """Test that schema migrations occur in correct order.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, @@ -63,26 +71,48 @@ async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) -> "homeassistant.components.recorder.migration._apply_update", wraps=migration._apply_update, ) as update, + patch( + "homeassistant.components.recorder.migration._migrate_schema", + wraps=migration._migrate_schema, + ) as migrate_schema, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False instance = recorder.get_instance(hass) engine = instance.engine session_maker = instance.get_session - update.assert_has_calls( - [ - call(instance, hass, 
engine, session_maker, version + 1, 0) - for version in range(db_schema.SCHEMA_VERSION) - ] - ) + assert update.mock_calls == [ + call(instance, hass, engine, session_maker, version + 1, 0) + for version in range(db_schema.SCHEMA_VERSION) + ] + assert migrate_schema.mock_calls == [ + call( + instance, + hass, + engine, + session_maker, + migration.SchemaValidationStatus(0, True, set(), 0), + 42, + ), + call( + instance, + hass, + engine, + session_maker, + migration.SchemaValidationStatus(42, True, set(), 0), + db_schema.SCHEMA_VERSION, + ), + ] -async def test_migration_in_progress(recorder_db_url: str, hass: HomeAssistant) -> None: +async def test_migration_in_progress( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, +) -> None: """Test that we can check for migration in progress.""" if recorder_db_url.startswith("mysql://"): # The database drop at the end of this test currently hangs on MySQL @@ -95,38 +125,55 @@ async def test_migration_in_progress(recorder_db_url: str, hass: HomeAssistant) assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, wait_recorder_setup=False ) - await recorder.get_instance(hass).async_migration_event.wait() + await hass.async_add_executor_job(instrument_migration.migration_started.wait) assert recorder.util.async_migration_in_progress(hass) is True + + # Let migration finish + instrument_migration.migration_stall.set() await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION +@pytest.mark.parametrize( + ( + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + ("migrate_schema_non_live", False, 1, 0), + ("migrate_schema_live", True, 2, 1), + ], +) async def test_database_migration_failed( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, ) -> None: """Test we notify if the migration fails.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch( - "homeassistant.components.recorder.migration._apply_update", + f"homeassistant.components.recorder.migration.{func_to_patch}", side_effect=ValueError, ), patch( @@ -138,9 +185,8 @@ async def test_database_migration_failed( side_effect=pn.dismiss, ) as mock_dismiss, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, expected_setup_result=expected_setup_result ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -149,18 +195,22 @@ async def test_database_migration_failed( await 
hass.async_block_till_done() assert recorder.util.async_migration_in_progress(hass) is False - assert len(mock_create.mock_calls) == 2 - assert len(mock_dismiss.mock_calls) == 1 + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss -async def test_database_migration_encounters_corruption( - recorder_db_url: str, hass: HomeAssistant +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_live_database_migration_encounters_corruption( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: - """Test we move away the database if its corrupt.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. - return + """Test we move away the database if it's corrupt. + + This test is specific for SQLite, wiping the database on error only happens + with SQLite. + """ assert recorder.util.async_migration_in_progress(hass) is False @@ -170,48 +220,117 @@ async def test_database_migration_encounters_corruption( ) with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - "homeassistant.components.recorder.migration.migrate_schema", + "homeassistant.components.recorder.migration.migrate_schema_live", side_effect=sqlite3_exception, ), patch( "homeassistant.components.recorder.core.move_away_broken_database" ) as move_away, patch( - "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", - ), + "homeassistant.components.recorder.core.Recorder._setup_run", + autospec=True, + wraps=recorder.Recorder._setup_run, + ) as setup_run, ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance(hass) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await async_wait_recording_done(hass) assert recorder.util.async_migration_in_progress(hass) is False - assert move_away.called + move_away.assert_called_once() + setup_run.assert_called_once() -async def test_database_migration_encounters_corruption_not_sqlite( - recorder_db_url: str, hass: HomeAssistant +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_non_live_database_migration_encounters_corruption( + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: - """Test we fail on database error when we cannot recover.""" + """Test we move away the database if it's corrupt. + + This test is specific for SQLite, wiping the database on error only happens + with SQLite.
+ """ + assert recorder.util.async_migration_in_progress(hass) is False + sqlite3_exception = DatabaseError("statement", {}, []) + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) + with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.migration._schema_is_current", side_effect=[False], ), patch( - "homeassistant.components.recorder.migration.migrate_schema", + "homeassistant.components.recorder.migration.migrate_schema_live", + ) as migrate_schema_live, + patch( + "homeassistant.components.recorder.migration.migrate_schema_non_live", + side_effect=sqlite3_exception, + ), + patch( + "homeassistant.components.recorder.core.move_away_broken_database" + ) as move_away, + patch( + "homeassistant.components.recorder.core.Recorder._setup_run", + autospec=True, + wraps=recorder.Recorder._setup_run, + ) as setup_run, + ): + await async_setup_recorder_instance(hass) + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await async_wait_recording_done(hass) + + assert recorder.util.async_migration_in_progress(hass) is False + move_away.assert_called_once() + migrate_schema_live.assert_not_called() + setup_run.assert_called_once() + + +@pytest.mark.parametrize( + ( + "live_migration", + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + (True, "migrate_schema_live", True, 2, 1), + (False, "migrate_schema_non_live", False, 1, 0), + ], +) +async def test_database_migration_encounters_corruption_not_sqlite( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + live_migration: bool, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, +) -> None: + """Test we fail on database error when we cannot recover.""" + assert recorder.util.async_migration_in_progress(hass) is False + + with ( + patch( + "homeassistant.components.recorder.migration._schema_is_current", + side_effect=[False], + ), + patch( + f"homeassistant.components.recorder.migration.{func_to_patch}", side_effect=DatabaseError("statement", {}, []), ), patch( @@ -225,10 +344,13 @@ async def test_database_migration_encounters_corruption_not_sqlite( "homeassistant.components.persistent_notification.dismiss", side_effect=pn.dismiss, ) as mock_dismiss, + patch( + "homeassistant.components.recorder.core.migration.live_migration", + return_value=live_migration, + ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} + await async_setup_recorder_instance( + hass, wait_recorder=False, expected_setup_result=expected_setup_result ) hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) @@ -238,39 +360,39 @@ async def test_database_migration_encounters_corruption_not_sqlite( assert recorder.util.async_migration_in_progress(hass) is False assert not move_away.called - assert len(mock_create.mock_calls) == 2 - assert len(mock_dismiss.mock_calls) == 1 + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss async def test_events_during_migration_are_queued( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, ) -> None: """Test that events during migration are 
queued.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch( - "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", - True, - ), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, - "recorder", - {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, + await async_setup_recorder_instance( + hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False ) + await hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) hass.states.async_set("my.entity", "off", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + + # Let migration finish + instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -282,27 +404,29 @@ async def test_events_during_migration_are_queued( async def test_events_during_migration_queue_exhausted( - recorder_db_url: str, hass: HomeAssistant + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, ) -> None: """Test that events during migration takes so long the queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize + ), ): - recorder_helper.async_initialize_recorder(hass) - await async_setup_component( - hass, - "recorder", - {"recorder": {"db_url": recorder_db_url, "commit_interval": 0}}, + await async_setup_recorder_instance( + hass, {"commit_interval": 0}, wait_recorder=False, wait_recorder_setup=False ) + await hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True hass.states.async_set("my.entity", "on", {}) await hass.async_block_till_done() async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) @@ -310,6 +434,9 @@ async def test_events_during_migration_queue_exhausted( async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() hass.states.async_set("my.entity", "off", {}) + + # Let migration finish + instrument_migration.migration_stall.set() await recorder.get_instance(hass).async_recorder_ready.wait() await async_wait_recording_done(hass) @@ -328,10 +455,23 @@ async def test_events_during_migration_queue_exhausted( @pytest.mark.parametrize( ("start_version", "live"), - [(0, True), (16, True), (18, True), (22, True), (25, True)], + [ + (0, False), + (9, False), + (16, False), + (18, False), + (22, False), + (25, False), + (43, True), + ], ) async def test_schema_migrate( - recorder_db_url: str, hass: HomeAssistant, start_version, live + hass: HomeAssistant, + recorder_db_url: str, + async_setup_recorder_instance: RecorderInstanceGenerator, + 
instrument_migration: InstrumentedMigration, + start_version, + live, ) -> None: """Test the full schema migration logic. @@ -340,11 +480,6 @@ async def test_schema_migrate( inspection could quickly become quite cumbersome. """ - migration_done = threading.Event() - migration_stall = threading.Event() - migration_version = None - real_migrate_schema = recorder.migration.migrate_schema - real_apply_update = recorder.migration._apply_update real_create_index = recorder.migration._create_index create_calls = 0 @@ -371,33 +506,6 @@ async def test_schema_migrate( start=self.recorder_runs_manager.recording_start, created=dt_util.utcnow() ) - def _instrument_migrate_schema(*args): - """Control migration progress and check results.""" - nonlocal migration_done - nonlocal migration_version - try: - real_migrate_schema(*args) - except Exception: - migration_done.set() - raise - - # Check and report the outcome of the migration; if migration fails - # the recorder will silently create a new database. - with session_scope(hass=hass, read_only=True) as session: - res = ( - session.query(db_schema.SchemaChanges) - .order_by(db_schema.SchemaChanges.change_id.desc()) - .first() - ) - migration_version = res.schema_version - migration_done.set() - - def _instrument_apply_update(*args): - """Control migration progress.""" - nonlocal migration_stall - migration_stall.wait() - real_apply_update(*args) - def _sometimes_failing_create_index(*args): """Make the first index create raise a retryable error to ensure we retry.""" if recorder_db_url.startswith("mysql://"): @@ -410,7 +518,6 @@ async def test_schema_migrate( real_create_index(*args) with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), patch( "homeassistant.components.recorder.core.create_engine", new=_create_engine_test, @@ -420,22 +527,11 @@ async def test_schema_migrate( side_effect=_mock_setup_run, autospec=True, ) as setup_run, - patch( - "homeassistant.components.recorder.migration.migrate_schema", - wraps=_instrument_migrate_schema, - ), - patch( - "homeassistant.components.recorder.migration._apply_update", - wraps=_instrument_apply_update, - ) as apply_update_mock, patch("homeassistant.components.recorder.util.time.sleep"), patch( "homeassistant.components.recorder.migration._create_index", wraps=_sometimes_failing_create_index, ), - patch( - "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", - ), patch( "homeassistant.components.recorder.Recorder._process_state_changed_event_into_session", ), @@ -446,24 +542,23 @@ async def test_schema_migrate( "homeassistant.components.recorder.Recorder._pre_process_startup_events", ), ): - recorder_helper.async_initialize_recorder(hass) - hass.async_create_task( - async_setup_component( - hass, "recorder", {"recorder": {"db_url": recorder_db_url}} - ) + await async_setup_recorder_instance( + hass, wait_recorder=False, wait_recorder_setup=live ) + await hass.async_add_executor_job(instrument_migration.migration_started.wait) + assert recorder.util.async_migration_in_progress(hass) is True await recorder_helper.async_wait_recorder(hass) assert recorder.util.async_migration_in_progress(hass) is True assert recorder.util.async_migration_is_live(hass) == live - migration_stall.set() + instrument_migration.migration_stall.set() await hass.async_block_till_done() - await hass.async_add_executor_job(migration_done.wait) + await hass.async_add_executor_job(instrument_migration.migration_done.wait) await async_wait_recording_done(hass) - assert migration_version 
== db_schema.SCHEMA_VERSION + assert instrument_migration.migration_version == db_schema.SCHEMA_VERSION assert setup_run.called assert recorder.util.async_migration_in_progress(hass) is not True - assert apply_update_mock.called + assert instrument_migration.apply_update_mock.called def test_invalid_update(hass: HomeAssistant) -> None: @@ -638,12 +733,13 @@ def test_raise_if_exception_missing_empty_cause_str() -> None: migration.raise_if_exception_missing_str(programming_exc, ["not present"]) +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: - """Test that we can rebuild the states table in SQLite.""" - if not recorder_db_url.startswith("sqlite://"): - # This test is specific for SQLite - return + """Test that we can rebuild the states table in SQLite. + This test is specific for SQLite. + """ engine = create_engine(recorder_db_url) session_maker = scoped_session(sessionmaker(bind=engine, future=True)) with session_scope(session=session_maker()) as session: @@ -661,14 +757,15 @@ def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: engine.dispose() +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") def test_rebuild_sqlite_states_table_missing_fails( recorder_db_url: str, caplog: pytest.LogCaptureFixture ) -> None: - """Test handling missing states table when attempting rebuild.""" - if not recorder_db_url.startswith("sqlite://"): - # This test is specific for SQLite - return + """Test handling missing states table when attempting rebuild. + This test is specific for SQLite. + """ engine = create_engine(recorder_db_url) session_maker = scoped_session(sessionmaker(bind=engine, future=True)) with session_scope(session=session_maker()) as session: @@ -695,14 +792,15 @@ def test_rebuild_sqlite_states_table_missing_fails( engine.dispose() +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") def test_rebuild_sqlite_states_table_extra_columns( recorder_db_url: str, caplog: pytest.LogCaptureFixture ) -> None: - """Test handling extra columns when rebuilding the states table.""" - if not recorder_db_url.startswith("sqlite://"): - # This test is specific for SQLite - return + """Test handling extra columns when rebuilding the states table. + This test is specific for SQLite. + """ engine = create_engine(recorder_db_url) session_maker = scoped_session(sessionmaker(bind=engine, future=True)) with session_scope(session=session_maker()) as session: @@ -722,3 +820,164 @@ def test_rebuild_sqlite_states_table_extra_columns( assert session.query(States).first().state == "on" engine.dispose() + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. 
+ + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id"), + ("states", "event_id"), # This won't be found + ("states", "old_state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": "events_data_id_fkey", + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": "states_old_state_id_fkey", + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + with Session(engine) as session: + session_maker = Mock(return_value=session) + dropped_constraints_1 = [ + dropped_constraint + for table, column in constraints_to_recreate + for dropped_constraint in migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + ] + assert dropped_constraints_1 == expected_dropped_constraints[db_engine] + + # Check we don't find the constrained columns again (they are removed) + with Session(engine) as session: + session_maker = Mock(return_value=session) + dropped_constraints_2 = [ + dropped_constraint + for table, column in constraints_to_recreate + for dropped_constraint in migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + ] + assert dropped_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, dropped_constraints_1 + ) + + # Check we do find the constrained columns again (they are restored) + with Session(engine) as session: + session_maker = Mock(return_value=session) + dropped_constraints_3 = [ + dropped_constraint + for table, column in constraints_to_recreate + for dropped_constraint in migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + ] + assert dropped_constraints_3 == expected_dropped_constraints[db_engine] + + engine.dispose() + + +def test_restore_foreign_key_constraints_with_error( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can drop and then restore foreign keys. 
+ + This is not supported on SQLite + """ + + constraints_to_restore = [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": "events_data_id_fkey", + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ] + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_restore + ) + + assert "Could not update foreign options in events table" in caplog.text diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 8fda495cf60..b2a83ae8313 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -12,7 +12,6 @@ import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session -from typing_extensions import AsyncGenerator from homeassistant.components import recorder from homeassistant.components.recorder import ( @@ -33,13 +32,7 @@ from homeassistant.components.recorder.queries import ( get_migration_changes, select_event_type_ids, ) -from homeassistant.components.recorder.tasks import ( - EntityIDMigrationTask, - EntityIDPostMigrationTask, - EventsContextIDMigrationTask, - EventTypeIDMigrationTask, - StatesContextIDMigrationTask, -) +from homeassistant.components.recorder.tasks import EntityIDPostMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, session_scope, @@ -49,6 +42,7 @@ import homeassistant.util.dt as dt_util from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import ( + MockMigrationTask, async_attach_db_engine, async_recorder_block_till_done, async_wait_recording_done, @@ -60,6 +54,13 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -110,27 +111,17 @@ def db_schema_32(): patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): yield -@pytest.fixture(name="legacy_recorder_mock") -async def legacy_recorder_mock_fixture( - recorder_mock: Recorder, -) -> AsyncGenerator[Recorder]: - """Fixture for legacy recorder mock.""" - with patch.object(recorder_mock.states_meta_manager, "active", False): - yield recorder_mock - - @pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_events_context_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, 
recorder_mock: Recorder ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -224,7 +215,7 @@ async def test_migrate_events_context_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) now = dt_util.utcnow() @@ -233,7 +224,8 @@ async def test_migrate_events_context_ids( with freeze_time(now): # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventsContextIDMigrationTask()) + migrator = migration.EventsContextIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _object_as_dict(obj): @@ -260,7 +252,7 @@ async def test_migrate_events_context_ids( assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] assert old_uuid_context_id_event["context_id"] is None @@ -331,7 +323,9 @@ async def test_migrate_events_context_ids( event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None ) - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EventsContextIDMigration.migration_id] == migration.EventsContextIDMigration.migration_version @@ -340,10 +334,9 @@ async def test_migrate_events_context_ids( @pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_states_context_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate old uuid context ids and ulid context ids to binary format.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -419,10 +412,11 @@ async def test_migrate_states_context_ids( ) ) - await instance.async_add_executor_job(_insert_states) + await recorder_mock.async_add_executor_job(_insert_states) await async_wait_recording_done(hass) - instance.queue_task(StatesContextIDMigrationTask()) + migrator = migration.StatesContextIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _object_as_dict(obj): @@ -449,7 +443,9 @@ async def test_migrate_states_context_ids( assert len(events) == 6 return {state.entity_id: _object_as_dict(state) for state in events} - states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] assert old_uuid_context_id["context_id"] is None @@ -524,7 +520,9 @@ async def test_migrate_states_context_ids( == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" ) - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await 
recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.StatesContextIDMigration.migration_id] == migration.StatesContextIDMigration.migration_version @@ -533,10 +531,9 @@ async def test_migrate_states_context_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) async def test_migrate_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate event_types to the EventTypes table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -563,11 +560,12 @@ async def test_migrate_event_type_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventTypeIDMigrationTask()) + migrator = migration.EventTypeIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -599,21 +597,23 @@ async def test_migrate_event_type_ids( ) return result - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type["event_type_two"]) == 1 def _get_many(): with session_scope(hass=hass, read_only=True) as session: - return instance.event_type_manager.get_many( + return recorder_mock.event_type_manager.get_many( ("event_type_one", "event_type_two"), session ) - mapped = await instance.async_add_executor_job(_get_many) + mapped = await recorder_mock.async_add_executor_job(_get_many) assert mapped["event_type_one"] is not None assert mapped["event_type_two"] is not None - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -621,11 +621,8 @@ async def test_migrate_event_type_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) -async def test_migrate_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -652,11 +649,12 @@ async def test_migrate_entity_ids( ) ) - await instance.async_add_executor_job(_insert_states) + await recorder_mock.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDMigrationTask()) + migrator = migration.EntityIDMigration(None, None) + recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -683,11 +681,15 @@ async def test_migrate_entity_ids( ) return result - states_by_entity_id = await 
instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) assert len(states_by_entity_id["sensor.two"]) == 2 assert len(states_by_entity_id["sensor.one"]) == 1 - migration_changes = await instance.async_add_executor_job(_get_migration_id, hass) + migration_changes = await recorder_mock.async_add_executor_job( + _get_migration_id, hass + ) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -696,10 +698,9 @@ async def test_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) async def test_post_migrate_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -726,11 +727,11 @@ async def test_post_migrate_entity_ids( ) ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDPostMigrationTask()) + recorder_mock.queue_task(EntityIDPostMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -742,7 +743,7 @@ async def test_post_migrate_entity_ids( assert len(states) == 3 return {state.state: state.entity_id for state in states} - states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_state = await recorder_mock.async_add_executor_job(_fetch_migrated_states) assert states_by_state["one_1"] is None assert states_by_state["two_2"] is None assert states_by_state["two_1"] is None @@ -750,10 +751,9 @@ async def test_post_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) async def test_migrate_null_entity_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -783,11 +783,12 @@ async def test_migrate_null_entity_ids( ), ) - await instance.async_add_executor_job(_insert_states) + await recorder_mock.async_add_executor_job(_insert_states) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EntityIDMigrationTask()) + migrator = migration.EntityIDMigration(None, None) + recorder_mock.queue_task(migration.CommitBeforeMigrationTask(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_states(): @@ -814,7 +815,9 @@ async def test_migrate_null_entity_ids( ) return result - states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + states_by_entity_id = await recorder_mock.async_add_executor_job( + _fetch_migrated_states + ) assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 assert len(states_by_entity_id["sensor.one"]) == 2 @@ -822,7 +825,7 @@ async def test_migrate_null_entity_ids( with session_scope(hass=hass, read_only=True) as session: return 
dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await instance.async_add_executor_job(_get_migration_id) + migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EntityIDMigration.migration_id] == migration.EntityIDMigration.migration_version @@ -831,10 +834,9 @@ async def test_migrate_null_entity_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) async def test_migrate_null_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -864,11 +866,12 @@ async def test_migrate_null_event_type_ids( ), ) - await instance.async_add_executor_job(_insert_events) + await recorder_mock.async_add_executor_job(_insert_events) await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventTypeIDMigrationTask()) + migrator = migration.EventTypeIDMigration(None, None) + recorder_mock.queue_task(migrator.task(migrator)) await _async_wait_migration_done(hass) def _fetch_migrated_events(): @@ -900,7 +903,7 @@ async def test_migrate_null_event_type_ids( ) return result - events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + events_by_type = await recorder_mock.async_add_executor_job(_fetch_migrated_events) assert len(events_by_type["event_type_one"]) == 2 assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000 @@ -908,7 +911,7 @@ async def test_migrate_null_event_type_ids( with session_scope(hass=hass, read_only=True) as session: return dict(execute_stmt_lambda_element(session, get_migration_changes())) - migration_changes = await instance.async_add_executor_job(_get_migration_id) + migration_changes = await recorder_mock.async_add_executor_job(_get_migration_id) assert ( migration_changes[migration.EventTypeIDMigration.migration_id] == migration.EventTypeIDMigration.migration_version @@ -916,11 +919,9 @@ async def test_migrate_null_event_type_ids( async def test_stats_timestamp_conversion_is_reentrant( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration is reentrant.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -932,7 +933,7 @@ async def test_stats_timestamp_conversion_is_reentrant( def _do_migration(): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -949,7 +950,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add( old_db_schema.StatisticsShortTerm( @@ -964,7 +965,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def 
_insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add( db_schema.StatisticsShortTerm( @@ -1070,11 +1071,9 @@ async def test_stats_timestamp_conversion_is_reentrant( async def test_stats_timestamp_with_one_by_one( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration with one by one.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1091,7 +1090,7 @@ async def test_stats_timestamp_with_one_by_one( side_effect=IntegrityError("test", "test", "test"), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -1108,7 +1107,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1135,7 +1134,7 @@ async def test_stats_timestamp_with_one_by_one( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1291,11 +1290,9 @@ async def test_stats_timestamp_with_one_by_one( async def test_stats_timestamp_with_one_by_one_removes_duplicates( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test stats migration with one by one removes duplicates.""" - instance = await async_setup_recorder_instance(hass) await async_wait_recording_done(hass) await async_attach_db_engine(hass) importlib.import_module(SCHEMA_MODULE) @@ -1319,7 +1316,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ), ): migration._migrate_statistics_columns_to_timestamp_removing_duplicates( - hass, instance, instance.get_session, instance.engine + hass, recorder_mock, recorder_mock.get_session, recorder_mock.engine ) def _insert_fake_metadata(): @@ -1336,7 +1333,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_pre_timestamp_stat(date_time: datetime) -> None: + def _insert_pre_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( @@ -1363,7 +1360,7 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( ) ) - def _insert_post_timestamp_stat(date_time: datetime) -> None: + def _insert_post_timestamp_stat(date_time: datetime.datetime) -> None: with session_scope(hass=hass) as session: session.add_all( ( diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index 4f59edb097f..bdd881a3a7b 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -1,7 +1,6 @@ """Test run time migrations are remembered in the migration_changes table.""" import importlib -from pathlib import Path import sys from unittest.mock import patch @@ -11,8 +10,8 @@ from sqlalchemy.orm import Session from 
homeassistant.components import recorder from homeassistant.components.recorder import core, migration, statistics +from homeassistant.components.recorder.migration import MigrationTask from homeassistant.components.recorder.queries import get_migration_changes -from homeassistant.components.recorder.tasks import StatesContextIDMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, session_scope, @@ -20,7 +19,11 @@ from homeassistant.components.recorder.util import ( from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant -from .common import async_recorder_block_till_done, async_wait_recording_done +from .common import ( + MockMigrationTask, + async_recorder_block_till_done, + async_wait_recording_done, +) from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -29,6 +32,13 @@ CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + async def _async_wait_migration_done(hass: HomeAssistant) -> None: """Wait for the migration to be done.""" await recorder.get_instance(hass).async_block_till_done() @@ -63,10 +73,10 @@ def _create_engine_test(*args, **kwargs): @pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migration_changes_prevent_trying_to_migrate_again( - async_setup_recorder_instance: RecorderInstanceGenerator, - tmp_path: Path, - recorder_db_url: str, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Test that we do not try to migrate when migration_changes indicate its already migrated. @@ -76,15 +86,8 @@ async def test_migration_changes_prevent_trying_to_migrate_again( 2. With current schema so the migration happens 3. 
With current schema to verify we do not have to query to see if the migration is done """ - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test uses a test database between runs so its - # SQLite specific - return - config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), - recorder.CONF_COMMIT_INTERVAL: 1, - } + config = {recorder.CONF_COMMIT_INTERVAL: 1} importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -100,11 +103,13 @@ async def test_migration_changes_prevent_trying_to_migrate_again( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch.object(core, "StateAttributes", old_db_schema.StateAttributes), - patch.object(migration.EntityIDMigration, "task", core.RecorderTask), + patch.object(migration.EntityIDMigration, "task", MockMigrationTask), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), ): - async with async_test_home_assistant() as hass: - await async_setup_recorder_instance(hass, config) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -113,8 +118,7 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() # Now start again with current db schema - async with async_test_home_assistant() as hass: - await async_setup_recorder_instance(hass, config) + async with async_test_home_assistant() as hass, async_test_recorder(hass, config): await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -138,19 +142,21 @@ async def test_migration_changes_prevent_trying_to_migrate_again( original_queue_task(self, task) # Finally verify we did not call needs_migrate_query on StatesContextIDMigration - async with async_test_home_assistant() as hass: - with ( - patch( - "homeassistant.components.recorder.core.Recorder.queue_task", - _queue_task, - ), - patch.object( - migration.StatesContextIDMigration, - "needs_migrate_query", - side_effect=RuntimeError("Should not be called"), - ), + with ( + patch( + "homeassistant.components.recorder.core.Recorder.queue_task", + _queue_task, + ), + patch.object( + migration.StatesContextIDMigration, + "needs_migrate_query", + side_effect=RuntimeError("Should not be called"), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass, config), ): - await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() await async_wait_recording_done(hass) await _async_wait_migration_done(hass) @@ -167,4 +173,4 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() for task in tasks: - assert not isinstance(task, StatesContextIDMigrationTask) + assert not isinstance(task, MigrationTask) diff --git a/tests/components/recorder/test_models.py b/tests/components/recorder/test_models.py index d06c4a629d7..975d67a8e99 100644 --- a/tests/components/recorder/test_models.py +++ b/tests/components/recorder/test_models.py @@ -15,11 +15,9 @@ from homeassistant.components.recorder.db_schema import ( ) from homeassistant.components.recorder.models import ( LazyState, - bytes_to_ulid_or_none, process_datetime_to_timestamp, process_timestamp, process_timestamp_to_utc_isoformat, - ulid_to_bytes_or_none, ) from homeassistant.const import EVENT_STATE_CHANGED import homeassistant.core as ha @@ -428,27 +426,3 @@ 
async def test_process_datetime_to_timestamp_mirrors_utc_isoformat_behavior( process_datetime_to_timestamp(datetime_hst_timezone) == dt_util.parse_datetime("2016-07-09T21:00:00+00:00").timestamp() ) - - -def test_ulid_to_bytes_or_none(caplog: pytest.LogCaptureFixture) -> None: - """Test ulid_to_bytes_or_none.""" - - assert ( - ulid_to_bytes_or_none("01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1") - == b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1" - ) - assert ulid_to_bytes_or_none("invalid") is None - assert "invalid" in caplog.text - assert ulid_to_bytes_or_none(None) is None - - -def test_bytes_to_ulid_or_none(caplog: pytest.LogCaptureFixture) -> None: - """Test bytes_to_ulid_or_none.""" - - assert ( - bytes_to_ulid_or_none(b"\x01w\xaf\xf9w\xe5\xf8~\x1f\x87\xe1\xf8~\x1f\x87\xe1") - == "01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1" - ) - assert bytes_to_ulid_or_none(b"invalid") is None - assert "invalid" in caplog.text - assert bytes_to_ulid_or_none(None) is None diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 1167fd4de73..60ee913cb66 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1,5 +1,6 @@ """Test data purging.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -9,10 +10,9 @@ from freezegun import freeze_time import pytest from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session -from typing_extensions import Generator from voluptuous.error import MultipleInvalid -from homeassistant.components import recorder +from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, Recorder from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.db_schema import ( Events, @@ -35,7 +35,6 @@ from homeassistant.components.recorder.tasks import PurgeTask from homeassistant.components.recorder.util import session_scope from homeassistant.const import EVENT_STATE_CHANGED, EVENT_THEMES_UPDATED, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util from .common import ( @@ -58,6 +57,13 @@ TEST_EVENT_TYPES = ( ) +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.fixture(name="use_sqlite") def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: """Pytest fixture to switch purge method.""" @@ -70,47 +76,42 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_big_database( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting 2/3 old states from a big database.""" - - instance = await async_setup_recorder_instance(hass) - for _ in range(12): await _add_test_states(hass, wait_recording_done=False) await async_wait_recording_done(hass) with ( - patch.object(instance, "max_bind_vars", 72), - patch.object(instance.database_engine, "max_bind_vars", 72), - session_scope(hass=hass) as session, + patch.object(recorder_mock, "max_bind_vars", 72), + patch.object(recorder_mock.database_engine, "max_bind_vars", 72), ): - states = session.query(States) - state_attributes = session.query(StateAttributes) - assert states.count() == 72 - assert state_attributes.count() == 
3 + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) + assert states.count() == 72 + assert state_attributes.count() == 3 purge_before = dt_util.utcnow() - timedelta(days=4) finished = purge_old_data( - instance, + recorder_mock, purge_before, states_batch_size=1, events_batch_size=1, repack=False, ) assert not finished - assert states.count() == 24 - assert state_attributes.count() == 1 + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) + assert states.count() == 24 + assert state_attributes.count() == 1 -async def test_purge_old_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_states(hass) # make sure we start with 6 states @@ -125,24 +126,30 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id - purge_before = dt_util.utcnow() - timedelta(days=4) + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + purge_before = dt_util.utcnow() - timedelta(days=4) + + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id + with session_scope(hass=hass) as session: states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order # but we can look them up by state @@ -153,27 +160,33 @@ async def test_purge_old_states( assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: assert states.count() == 0 assert state_attributes.count() == 0 - 
assert "test.recorder2" not in instance.states_manager._last_committed_id + assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -187,25 +200,22 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting old states.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. - return - - await async_setup_recorder_instance(hass) + """Test database image image is malformed while deleting old states. + This test is specific for SQLite, wiping the database on error only happens + with SQLite. + """ await _add_test_states(hass) await async_wait_recording_done(hass) @@ -221,7 +231,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -234,13 +244,11 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_states(hass) await async_wait_recording_done(hass) @@ -253,9 +261,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(instance.engine.dialect, "name", "mysql"), + patch.object(recorder_mock.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -264,14 +272,12 @@ async def test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called +@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" - await async_setup_recorder_instance(hass) - await _add_test_states(hass) await async_wait_recording_done(hass) @@ -281,7 +287,7 @@ async def test_purge_old_states_encounters_operational_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", 
side_effect=exception, ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -290,12 +296,8 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_events(hass) with session_scope(hass=hass) as session: @@ -304,38 +306,46 @@ async def test_purge_old_events( ) assert events.count() == 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert not finished all_events = events.all() assert events.count() == 2, f"Should have 2 events left: {all_events}" - # we should only have 2 events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + # we should only have 2 events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == 2 async def test_purge_old_recorder_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old recorder runs keeps current run.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_recorder_runs(hass) # make sure we start with 7 recorder runs @@ -343,35 +353,36 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - async_setup_recorder_instance: 
RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old statistics runs keeps the latest run.""" - instance = await async_setup_recorder_instance(hass) - await _add_test_statistics_runs(hass) # make sure we start with 7 statistics runs @@ -379,20 +390,23 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(instance, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert not finished - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -410,8 +424,6 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start - await async_setup_recorder_instance(hass) - service_data = {"keep_days": 4} await _add_test_events(hass) await _add_test_states(hass) @@ -517,8 +529,8 @@ async def test_purge_method( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) async def test_purge_edge_case( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" @@ -552,11 +564,9 @@ async def test_purge_edge_case( attributes_id=1002, ) ) - instance = recorder.get_instance(hass) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) - await async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -575,7 +585,7 @@ async def test_purge_edge_case( ) assert events.count() == 1 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -590,10 +600,7 @@ async def test_purge_edge_case( assert events.count() == 0 -async def test_purge_cutoff_date( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, -) -> None: +async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -656,10 +663,9 @@ async def test_purge_cutoff_date( attributes_id=1000 + row, ) ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) - instance = await 
async_setup_recorder_instance(hass, None) await async_wait_purge_done(hass) service_data = {"keep_days": 2} @@ -695,7 +701,7 @@ async def test_purge_cutoff_date( == 1 ) - instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) + recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -736,7 +742,9 @@ async def test_purge_cutoff_date( ) # Make sure we can purge everything - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -747,7 +755,9 @@ async def test_purge_cutoff_date( assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -759,15 +769,16 @@ async def test_purge_cutoff_date( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] +) async def test_purge_filtered_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged.""" - config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} - instance = await async_setup_recorder_instance(hass, config) - assert instance.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -850,8 +861,8 @@ async def test_purge_filtered_states( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -865,7 +876,7 @@ async def test_purge_filtered_states( assert events_keep.count() == 1 # Normal purge doesn't remove excluded entities - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -881,7 +892,7 @@ async def test_purge_filtered_states( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -929,7 +940,7 @@ async def test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 # Do it again to make sure nothing changes - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -941,7 +952,7 @@ async def 
test_purge_filtered_states( assert session.query(StateAttributes).count() == 11 service_data = {"keep_days": 0} - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -954,15 +965,16 @@ async def test_purge_filtered_states( @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"entities": ["sensor.excluded"]}}] +) async def test_purge_filtered_states_to_empty( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, use_sqlite: bool, ) -> None: """Test filtered states are purged all the way to an empty db.""" - config: ConfigType = {"exclude": {"entities": ["sensor.excluded"]}} - instance = await async_setup_recorder_instance(hass, config) - assert instance.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.excluded") is False def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -977,7 +989,7 @@ async def test_purge_filtered_states_to_empty( timestamp, event_id * days, ) - convert_pending_states_to_meta(instance, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10} _add_db_entries(hass) @@ -990,7 +1002,7 @@ async def test_purge_filtered_states_to_empty( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -1002,21 +1014,22 @@ async def test_purge_filtered_states_to_empty( # Do it again to make sure nothing changes # Why do we do this? Should we check the end result? 
-    await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data)
+    await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
     await async_recorder_block_till_done(hass)
     await async_wait_purge_done(hass)
 
 
 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True)
+@pytest.mark.parametrize(
+    "recorder_config", [{"exclude": {"entities": ["sensor.old_format"]}}]
+)
 async def test_purge_without_state_attributes_filtered_states_to_empty(
-    async_setup_recorder_instance: RecorderInstanceGenerator,
     hass: HomeAssistant,
+    recorder_mock: Recorder,
     use_sqlite: bool,
 ) -> None:
     """Test filtered legacy states without state attributes are purged all the way to an empty db."""
-    config: ConfigType = {"exclude": {"entities": ["sensor.old_format"]}}
-    instance = await async_setup_recorder_instance(hass, config)
-    assert instance.entity_filter("sensor.old_format") is False
+    assert recorder_mock.entity_filter("sensor.old_format") is False
 
     def _add_db_entries(hass: HomeAssistant) -> None:
         with session_scope(hass=hass) as session:
@@ -1053,8 +1066,8 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(
                     time_fired_ts=dt_util.utc_to_timestamp(timestamp),
                 )
             )
-            convert_pending_states_to_meta(instance, session)
-            convert_pending_events_to_event_types(instance, session)
+            convert_pending_states_to_meta(recorder_mock, session)
+            convert_pending_events_to_event_types(recorder_mock, session)
 
     service_data = {"keep_days": 10}
     _add_db_entries(hass)
@@ -1067,7 +1080,7 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(
 
     # Test with 'apply_filter' = True
     service_data["apply_filter"] = True
-    await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data)
+    await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data)
     await async_recorder_block_till_done(hass)
     await async_wait_purge_done(hass)
 
@@ -1079,18 +1092,18 @@ async def test_purge_without_state_attributes_filtered_states_to_empty(
 
     # Do it again to make sure nothing changes
     # Why do we do this? Should we check the end result?
- await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) +@pytest.mark.parametrize( + "recorder_config", [{"exclude": {"event_types": ["EVENT_PURGE"]}}] +) async def test_purge_filtered_events( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test filtered events are purged.""" - config: ConfigType = {"exclude": {"event_types": ["EVENT_PURGE"]}} - instance = await async_setup_recorder_instance(hass, config) await async_wait_recording_done(hass) def _add_db_entries(hass: HomeAssistant) -> None: @@ -1119,11 +1132,11 @@ async def test_purge_filtered_events( timestamp, event_id, ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10} - await instance.async_add_executor_job(_add_db_entries, hass) + await recorder_mock.async_add_executor_job(_add_db_entries, hass) await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: @@ -1135,7 +1148,7 @@ async def test_purge_filtered_events( assert states.count() == 10 # Normal purge doesn't remove excluded events - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1151,7 +1164,7 @@ async def test_purge_filtered_events( # Test with 'apply_filter' = True service_data["apply_filter"] = True - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -1169,23 +1182,26 @@ async def test_purge_filtered_events( assert states.count() == 10 +@pytest.mark.parametrize( + "recorder_config", + [ + { + "exclude": { + "event_types": ["excluded_event"], + "entities": ["sensor.excluded", "sensor.old_format"], + } + } + ], +) async def test_purge_filtered_events_state_changed( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test filtered state_changed events are purged. 
This should also remove all states.""" - config: ConfigType = { - "exclude": { - "event_types": ["excluded_event"], - "entities": ["sensor.excluded", "sensor.old_format"], - } - } - instance = await async_setup_recorder_instance(hass, config) # Assert entity_id is NOT excluded - assert instance.entity_filter("sensor.excluded") is False - assert instance.entity_filter("sensor.old_format") is False - assert instance.entity_filter("sensor.keep") is True - assert "excluded_event" in instance.exclude_event_types + assert recorder_mock.entity_filter("sensor.excluded") is False + assert recorder_mock.entity_filter("sensor.old_format") is False + assert recorder_mock.entity_filter("sensor.keep") is True + assert "excluded_event" in recorder_mock.exclude_event_types def _add_db_entries(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1258,8 +1274,8 @@ async def test_purge_filtered_events_state_changed( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - convert_pending_events_to_event_types(instance, session) - convert_pending_states_to_meta(instance, session) + convert_pending_events_to_event_types(recorder_mock, session) + convert_pending_states_to_meta(recorder_mock, session) service_data = {"keep_days": 10, "apply_filter": True} _add_db_entries(hass) @@ -1277,7 +1293,7 @@ async def test_purge_filtered_events_state_changed( assert events_purge.count() == 1 assert states.count() == 64 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() for _ in range(4): @@ -1311,11 +1327,8 @@ async def test_purge_filtered_events_state_changed( ) # should have been kept -async def test_purge_entities( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test purging of specific entities.""" - instance = await async_setup_recorder_instance(hass) async def _purge_entities(hass, entity_ids, domains, entity_globs): service_data = { @@ -1325,7 +1338,7 @@ async def test_purge_entities( } await hass.services.async_call( - recorder.DOMAIN, SERVICE_PURGE_ENTITIES, service_data + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, service_data ) await hass.async_block_till_done() @@ -1363,8 +1376,8 @@ async def test_purge_entities( timestamp, event_id * days, ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) def _add_keep_records(hass: HomeAssistant) -> None: with session_scope(hass=hass) as session: @@ -1378,8 +1391,8 @@ async def test_purge_entities( timestamp, event_id, ) - convert_pending_states_to_meta(instance, session) - convert_pending_events_to_event_types(instance, session) + convert_pending_states_to_meta(recorder_mock, session) + convert_pending_events_to_event_types(recorder_mock, session) _add_purge_records(hass) _add_keep_records(hass) @@ -1657,15 +1670,14 @@ def _add_state_with_state_attributes( @pytest.mark.timeout(30) async def test_purge_many_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events.""" old_events_count = 5 - instance = await async_setup_recorder_instance(hass) with ( - patch.object(instance, 
"max_bind_vars", old_events_count), - patch.object(instance.database_engine, "max_bind_vars", old_events_count), + patch.object(recorder_mock, "max_bind_vars", old_events_count), + patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -1675,48 +1687,62 @@ async def test_purge_many_old_events( ) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert not finished assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, + # we should only have 2 groups of events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - instance, - dt_util.utcnow(), - repack=False, - states_batch_size=20, - events_batch_size=20, + # we should now purge everything + finished = purge_old_data( + recorder_mock, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter( + Events.event_type_id.in_(select_event_type_ids(TEST_EVENT_TYPES)) ) - assert finished assert events.count() == 0 async def test_purge_old_events_purges_the_event_type_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events purges event type ids.""" - instance = await async_setup_recorder_instance(hass) - assert instance.event_type_manager.active is True + assert recorder_mock.event_type_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1760,7 +1786,7 @@ async def test_purge_old_events_purges_the_event_type_ids( time_fired_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return instance.event_type_manager.get_many( + return recorder_mock.event_type_manager.get_many( [ "EVENT_TEST_AUTOPURGE", "EVENT_TEST_PURGE", @@ -1770,7 +1796,7 @@ async def test_purge_old_events_purges_the_event_type_ids( session, ) - event_type_to_id = await instance.async_add_executor_job(_insert_events) + event_type_to_id = await recorder_mock.async_add_executor_job(_insert_events) test_event_type_ids = event_type_to_id.values() with session_scope(hass=hass) as session: events = session.query(Events).where( @@ -1783,47 +1809,70 @@ async def test_purge_old_events_purges_the_event_type_ids( assert events.count() == 30 assert event_types.count() == 4 - # run purge_old_data() - finished = purge_old_data( - instance, - 
far_past, - repack=False, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + far_past, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 30 # We should remove the unused event type assert event_types.count() == 3 - assert "EVENT_TEST_UNUSED" not in instance.event_type_manager._id_map + assert "EVENT_TEST_UNUSED" not in recorder_mock.event_type_manager._id_map - # we should only have 10 events left since - # only one event type was recorded now - finished = purge_old_data( - instance, - utcnow, - repack=False, + # we should only have 10 events left since + # only one event type was recorded now + finished = purge_old_data( + recorder_mock, + utcnow, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 10 assert event_types.count() == 1 - # Purge everything - finished = purge_old_data( - instance, - utcnow + timedelta(seconds=1), - repack=False, + # Purge everything + finished = purge_old_data( + recorder_mock, + utcnow + timedelta(seconds=1), + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).where( + Events.event_type_id.in_(test_event_type_ids) + ) + event_types = session.query(EventTypes).where( + EventTypes.event_type_id.in_(test_event_type_ids) ) - assert finished assert events.count() == 0 assert event_types.count() == 0 async def test_purge_old_states_purges_the_state_metadata_ids( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old states purges state metadata_ids.""" - instance = await async_setup_recorder_instance(hass) - assert instance.states_meta_manager.active is True + assert recorder_mock.states_meta_manager.active is True utcnow = dt_util.utcnow() five_days_ago = utcnow - timedelta(days=5) @@ -1867,13 +1916,15 @@ async def test_purge_old_states_purges_the_state_metadata_ids( last_updated_ts=dt_util.utc_to_timestamp(timestamp), ) ) - return instance.states_meta_manager.get_many( + return recorder_mock.states_meta_manager.get_many( ["sensor.one", "sensor.two", "sensor.three", "sensor.unused"], session, True, ) - entity_id_to_metadata_id = await instance.async_add_executor_job(_insert_states) + entity_id_to_metadata_id = await recorder_mock.async_add_executor_job( + _insert_states + ) test_metadata_ids = entity_id_to_metadata_id.values() with session_scope(hass=hass) as session: states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) @@ -1884,47 +1935,63 @@ async def test_purge_old_states_purges_the_state_metadata_ids( assert states.count() == 30 assert states_meta.count() == 4 - # run purge_old_data() - finished = purge_old_data( - instance, - far_past, - repack=False, + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + far_past, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = 
session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 30 # We should remove the unused entity_id assert states_meta.count() == 3 - assert "sensor.unused" not in instance.event_type_manager._id_map + assert "sensor.unused" not in recorder_mock.event_type_manager._id_map - # we should only have 10 states left since - # only one event type was recorded now - finished = purge_old_data( - instance, - utcnow, - repack=False, + # we should only have 10 states left since + # only one event type was recorded now + finished = purge_old_data( + recorder_mock, + utcnow, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 10 assert states_meta.count() == 1 - # Purge everything - finished = purge_old_data( - instance, - utcnow + timedelta(seconds=1), - repack=False, + # Purge everything + finished = purge_old_data( + recorder_mock, + utcnow + timedelta(seconds=1), + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States).where(States.metadata_id.in_(test_metadata_ids)) + states_meta = session.query(StatesMeta).where( + StatesMeta.metadata_id.in_(test_metadata_ids) ) - assert finished assert states.count() == 0 assert states_meta.count() == 0 async def test_purge_entities_keep_days( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging states with an entity filter and keep_days.""" - instance = await async_setup_recorder_instance(hass, {}) await hass.async_block_till_done() await async_wait_recording_done(hass) start = dt_util.utcnow() @@ -1946,7 +2013,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1957,7 +2024,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1967,7 +2034,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1978,7 +2045,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1987,7 +2054,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index 8a641a2ce7f..0754b2e911c 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -1,5 +1,6 
@@ """Test data purging.""" +from collections.abc import Generator from datetime import datetime, timedelta import json import sqlite3 @@ -10,10 +11,12 @@ import pytest from sqlalchemy import text, update from sqlalchemy.exc import DatabaseError, OperationalError from sqlalchemy.orm.session import Session -from typing_extensions import Generator -from homeassistant.components import recorder -from homeassistant.components.recorder import migration +from homeassistant.components.recorder import ( + DOMAIN as RECORDER_DOMAIN, + Recorder, + migration, +) from homeassistant.components.recorder.const import SupportedDialect from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.purge import purge_old_data @@ -47,6 +50,13 @@ from .db_schema_32 import ( from tests.typing import RecorderInstanceGenerator +@pytest.fixture +async def mock_recorder_before_hass( + async_test_recorder: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + + @pytest.fixture(autouse=True) def db_schema_32(): """Fixture to initialize the db with the old schema 32.""" @@ -66,11 +76,8 @@ def mock_use_sqlite(request: pytest.FixtureRequest) -> Generator[None]: yield -async def test_purge_old_states( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old states.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -87,23 +94,27 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id states_after_purge = list(session.query(States)) # Since these states are deleted in batches, we can't guarantee the order @@ -115,27 +126,35 @@ async def test_purge_old_states( assert dontpurgeme_5.old_state_id == dontpurgeme_4.state_id assert dontpurgeme_4.old_state_id is None - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 2 assert state_attributes.count() == 1 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id - # run purge_old_data again - purge_before = dt_util.utcnow() - 
finished = purge_old_data( - instance, - purge_before, - states_batch_size=1, - events_batch_size=1, - repack=False, - ) - assert not finished + # run purge_old_data again + purge_before = dt_util.utcnow() + finished = purge_old_data( + recorder_mock, + purge_before, + states_batch_size=1, + events_batch_size=1, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states = session.query(States) + state_attributes = session.query(StateAttributes) assert states.count() == 0 assert state_attributes.count() == 0 - assert "test.recorder2" not in instance.states_manager._last_committed_id + assert "test.recorder2" not in recorder_mock.states_manager._last_committed_id # Add some more states await _add_test_states(hass) @@ -149,24 +168,22 @@ async def test_purge_old_states( events = session.query(Events).filter(Events.event_type == "state_changed") assert events.count() == 0 - assert "test.recorder2" in instance.states_manager._last_committed_id + assert "test.recorder2" in recorder_mock.states_manager._last_committed_id state_attributes = session.query(StateAttributes) assert state_attributes.count() == 3 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("recorder_mock", "skip_by_db_engine") async def test_purge_old_states_encouters_database_corruption( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - recorder_db_url: str, ) -> None: - """Test database image image is malformed while deleting old states.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite, wiping the database on error only happens - # with SQLite. - return + """Test database image image is malformed while deleting old states. - await async_setup_recorder_instance(hass) + This test is specific for SQLite, wiping the database on error only happens + with SQLite. 
+ """ await async_attach_db_engine(hass) await _add_test_states(hass) @@ -184,7 +201,7 @@ async def test_purge_old_states_encouters_database_corruption( side_effect=sqlite3_exception, ), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) @@ -197,12 +214,11 @@ async def test_purge_old_states_encouters_database_corruption( async def test_purge_old_states_encounters_temporary_mysql_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, caplog: pytest.LogCaptureFixture, ) -> None: """Test retry on specific mysql operational errors.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -217,9 +233,9 @@ async def test_purge_old_states_encounters_temporary_mysql_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=[mysql_exception, None], ), - patch.object(instance.engine.dialect, "name", "mysql"), + patch.object(recorder_mock.engine.dialect, "name", "mysql"), ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -228,13 +244,12 @@ async def test_purge_old_states_encounters_temporary_mysql_error( assert sleep_mock.called +@pytest.mark.usefixtures("recorder_mock") async def test_purge_old_states_encounters_operational_error( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, ) -> None: """Test error on operational errors that are not mysql does not retry.""" - await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_states(hass) @@ -246,7 +261,7 @@ async def test_purge_old_states_encounters_operational_error( "homeassistant.components.recorder.purge._purge_old_recorder_runs", side_effect=exception, ): - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, {"keep_days": 0}) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, {"keep_days": 0}) await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -255,11 +270,8 @@ async def test_purge_old_states_encounters_operational_error( assert "Error executing purge" in caplog.text -async def test_purge_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant -) -> None: +async def test_purge_old_events(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_events(hass) @@ -270,34 +282,39 @@ async def test_purge_old_events( purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) 
assert events.count() == 2 - # we should only have 2 events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + # we should only have 2 events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 2 async def test_purge_old_recorder_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old recorder runs keeps current run.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_recorder_runs(hass) @@ -307,34 +324,36 @@ async def test_purge_old_recorder_runs( recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert not finished - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, - ) - assert finished + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + assert finished + + with session_scope(hass=hass) as session: + recorder_runs = session.query(RecorderRuns) assert recorder_runs.count() == 1 async def test_purge_old_statistics_runs( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old statistics runs keeps the latest run.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await _add_test_statistics_runs(hass) @@ -344,20 +363,23 @@ async def test_purge_old_statistics_runs( statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 7 - purge_before = dt_util.utcnow() + purge_before = dt_util.utcnow() - # run purge_old_data() - finished = purge_old_data(instance, purge_before, repack=False) - assert not finished + # run purge_old_data() + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert not finished - finished = purge_old_data(instance, purge_before, repack=False) - assert finished + finished = purge_old_data(recorder_mock, purge_before, repack=False) + assert finished + + with session_scope(hass=hass) as session: + statistics_runs = session.query(StatisticsRuns) assert statistics_runs.count() == 1 @pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) +@pytest.mark.usefixtures("recorder_mock") async def test_purge_method( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, caplog: pytest.LogCaptureFixture, use_sqlite: bool, @@ -375,7 +397,6 @@ async def test_purge_method( assert run1.run_id == run2.run_id assert run1.start == run2.start - await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) service_data = {"keep_days": 4} @@ -476,11 +497,8 @@ async def test_purge_method( 
@pytest.mark.parametrize("use_sqlite", [True, False], indirect=True) -async def test_purge_edge_case( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, - use_sqlite: bool, -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_purge_edge_case(hass: HomeAssistant, use_sqlite: bool) -> None: """Test states and events are purged even if they occurred shortly before purge_before.""" async def _add_db_entries(hass: HomeAssistant, timestamp: datetime) -> None: @@ -513,7 +531,6 @@ async def test_purge_edge_case( ) ) - await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -532,7 +549,7 @@ async def test_purge_edge_case( events = session.query(Events).filter(Events.event_type == "EVENT_TEST_PURGE") assert events.count() == 1 - await hass.services.async_call(recorder.DOMAIN, SERVICE_PURGE, service_data) + await hass.services.async_call(RECORDER_DOMAIN, SERVICE_PURGE, service_data) await hass.async_block_till_done() await async_recorder_block_till_done(hass) @@ -545,10 +562,7 @@ async def test_purge_edge_case( assert events.count() == 0 -async def test_purge_cutoff_date( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, -) -> None: +async def test_purge_cutoff_date(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test states and events are purged only if they occurred before "now() - keep_days".""" async def _add_db_entries(hass: HomeAssistant, cutoff: datetime, rows: int) -> None: @@ -612,7 +626,6 @@ async def test_purge_cutoff_date( ) ) - instance = await async_setup_recorder_instance(hass, None) await async_attach_db_engine(hass) await async_wait_purge_done(hass) @@ -641,7 +654,7 @@ async def test_purge_cutoff_date( assert events.filter(Events.event_type == "PURGE").count() == rows - 1 assert events.filter(Events.event_type == "KEEP").count() == 1 - instance.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) + recorder_mock.queue_task(PurgeTask(cutoff, repack=False, apply_filter=False)) await hass.async_block_till_done() await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -672,7 +685,9 @@ async def test_purge_cutoff_date( assert events.filter(Events.event_type == "KEEP").count() == 1 # Make sure we can purge everything - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -683,7 +698,9 @@ async def test_purge_cutoff_date( assert state_attributes.count() == 0 # Make sure we can purge everything when the db is already empty - instance.queue_task(PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False)) + recorder_mock.queue_task( + PurgeTask(dt_util.utcnow(), repack=False, apply_filter=False) + ) await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) @@ -936,16 +953,15 @@ def _add_state_and_state_changed_event( async def test_purge_many_old_events( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test deleting old events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) old_events_count = 5 with ( - patch.object(instance, "max_bind_vars", old_events_count), - patch.object(instance.database_engine, "max_bind_vars", old_events_count), + 
patch.object(recorder_mock, "max_bind_vars", old_events_count), + patch.object(recorder_mock.database_engine, "max_bind_vars", old_events_count), ): await _add_test_events(hass, old_events_count) @@ -953,60 +969,70 @@ async def test_purge_many_old_events( events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 6 - purge_before = dt_util.utcnow() - timedelta(days=4) + purge_before = dt_util.utcnow() - timedelta(days=4) - # run purge_old_data() - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert not finished + # run purge_old_data() + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert not finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 3 - # we should only have 2 groups of events left - finished = purge_old_data( - instance, - purge_before, - repack=False, - states_batch_size=3, - events_batch_size=3, - ) - assert finished + # we should only have 2 groups of events left + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + states_batch_size=3, + events_batch_size=3, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == old_events_count * 2 - # we should now purge everything - finished = purge_old_data( - instance, - dt_util.utcnow(), - repack=False, - states_batch_size=20, - events_batch_size=20, - ) - assert finished + # we should now purge everything + finished = purge_old_data( + recorder_mock, + dt_util.utcnow(), + repack=False, + states_batch_size=20, + events_batch_size=20, + ) + assert finished + + with session_scope(hass=hass) as session: + events = session.query(Events).filter(Events.event_type.like("EVENT_TEST%")) assert events.count() == 0 async def test_purge_can_mix_legacy_and_new_format( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging with legacy and new events.""" - instance = await async_setup_recorder_instance(hass) await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert instance.use_legacy_events_index is False + assert recorder_mock.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" - migration._create_index(instance.get_session, "states", "ix_states_event_id") - instance.use_legacy_events_index = True + migration._create_index( + recorder_mock.get_session, "states", "ix_states_event_id" + ) + recorder_mock.use_legacy_events_index = True - await instance.async_add_executor_job(_recreate_legacy_events_index) - assert instance.use_legacy_events_index is True + await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) + assert recorder_mock.use_legacy_events_index is True utcnow = dt_util.utcnow() eleven_days_ago = utcnow - timedelta(days=11) @@ -1045,39 +1071,65 @@ async def test_purge_can_mix_legacy_and_new_format( assert states_with_event_id.count() == 50 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = 
purge_old_data( - instance, - purge_before, - repack=False, + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + # Since we only allow one iteration, we won't + # check if we are finished this loop similar + # to the legacy method + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy method - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1085,41 +1137,53 @@ async def test_purge_can_mix_legacy_and_new_format( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - finished = purge_old_data( - instance, - purge_before, - repack=False, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. 
assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_purge_can_mix_legacy_and_new_format_with_detached_state( - async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, + recorder_mock: Recorder, recorder_db_url: str, ) -> None: - """Test purging with legacy and new events with a detached state.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - return pytest.skip("This tests disables foreign key checks on SQLite") + """Test purging with legacy and new events with a detached state. - instance = await async_setup_recorder_instance(hass) + This tests disables foreign key checks on SQLite. + """ await async_attach_db_engine(hass) await async_wait_recording_done(hass) # New databases are no longer created with the legacy events index - assert instance.use_legacy_events_index is False + assert recorder_mock.use_legacy_events_index is False def _recreate_legacy_events_index(): """Recreate the legacy events index since its no longer created on new instances.""" - migration._create_index(instance.get_session, "states", "ix_states_event_id") - instance.use_legacy_events_index = True + migration._create_index( + recorder_mock.get_session, "states", "ix_states_event_id" + ) + recorder_mock.use_legacy_events_index = True - await instance.async_add_executor_job(_recreate_legacy_events_index) - assert instance.use_legacy_events_index is True + await recorder_mock.async_add_executor_job(_recreate_legacy_events_index) + assert recorder_mock.use_legacy_events_index is True with session_scope(hass=hass) as session: session.execute(text("PRAGMA foreign_keys = OFF")) @@ -1189,39 +1253,65 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( assert states_with_event_id.count() == 52 assert states_without_event_id.count() == 51 - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 51 - # At this point all the legacy states are gone - # and we switch methods - purge_before = dt_util.utcnow() - timedelta(days=4) - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=1, - states_batch_size=1, + + # At this point all the legacy states are gone + # and we switch methods + purge_before = dt_util.utcnow() - timedelta(days=4) + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=1, + states_batch_size=1, + ) + # Since we only allow one iteration, we won't + # check if we are finished this loop similar + # to the legacy method + assert not finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - # Since we only allow one iteration, we won't - # check if we are finished this loop similar - # to the legacy 
method - assert not finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 - finished = purge_old_data( - instance, - purge_before, - repack=False, - events_batch_size=100, - states_batch_size=100, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + events_batch_size=100, + states_batch_size=100, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 1 _add_state_without_event_linkage( @@ -1229,12 +1319,21 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( ) assert states_with_event_id.count() == 0 assert states_without_event_id.count() == 2 - finished = purge_old_data( - instance, - purge_before, - repack=False, + + finished = purge_old_data( + recorder_mock, + purge_before, + repack=False, + ) + assert finished + + with session_scope(hass=hass) as session: + states_with_event_id = session.query(States).filter( + States.event_id.is_not(None) + ) + states_without_event_id = session.query(States).filter( + States.event_id.is_(None) ) - assert finished # The broken state without a timestamp # does not prevent future purges. Its ignored. assert states_with_event_id.count() == 0 @@ -1242,11 +1341,9 @@ async def test_purge_can_mix_legacy_and_new_format_with_detached_state( async def test_purge_entities_keep_days( - async_setup_recorder_instance: RecorderInstanceGenerator, - hass: HomeAssistant, + hass: HomeAssistant, recorder_mock: Recorder ) -> None: """Test purging states with an entity filter and keep_days.""" - instance = await async_setup_recorder_instance(hass, {}) await async_attach_db_engine(hass) await hass.async_block_till_done() @@ -1270,7 +1367,7 @@ async def test_purge_entities_keep_days( hass.states.async_set("sensor.keep", "now") await async_recorder_block_till_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1281,7 +1378,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 3 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1291,7 +1388,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, @@ -1302,7 +1399,7 @@ async def test_purge_entities_keep_days( assert len(states["sensor.purge"]) == 1 await hass.services.async_call( - recorder.DOMAIN, + RECORDER_DOMAIN, SERVICE_PURGE_ENTITIES, { "entity_id": "sensor.purge", @@ -1311,7 +1408,7 @@ async def test_purge_entities_keep_days( await async_recorder_block_till_done(hass) await async_wait_purge_done(hass) - states = await instance.async_add_executor_job( + states = await recorder_mock.async_add_executor_job( get_significant_states, hass, one_month_ago, diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 7d8bc6e3415..5cbb29afc91 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -1,7 +1,8 @@ """The 
tests for sensor recorder platform.""" from datetime import timedelta -from unittest.mock import patch +from typing import Any +from unittest.mock import ANY, Mock, patch import pytest from sqlalchemy import select @@ -15,17 +16,21 @@ from homeassistant.components.recorder.models import ( ) from homeassistant.components.recorder.statistics import ( STATISTIC_UNIT_TO_UNIT_CONVERTER, + PlatformCompiledStatistics, _generate_max_mean_min_statistic_in_sub_period_stmt, _generate_statistics_at_time_stmt, _generate_statistics_during_period_stmt, async_add_external_statistics, async_import_statistics, + async_list_statistic_ids, get_last_short_term_statistics, get_last_statistics, get_latest_short_term_statistics_with_session, get_metadata, + get_metadata_with_session, get_short_term_statistics_run_cache, list_statistic_ids, + validate_statistics, ) from homeassistant.components.recorder.table_managers.statistics_meta import ( _generate_get_metadata_stmt, @@ -41,17 +46,20 @@ import homeassistant.util.dt as dt_util from .common import ( assert_dict_of_states_equal_without_context_and_last_changed, async_record_states, + async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, + get_start_time, statistics_during_period, ) +from tests.common import MockPlatform, mock_platform from tests.typing import RecorderInstanceGenerator, WebSocketGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -61,6 +69,15 @@ def setup_recorder(recorder_mock: Recorder) -> None: """Set up recorder.""" +async def _setup_mock_domain( + hass: HomeAssistant, + platform: Any | None = None, # There's no RecorderPlatform class yet +) -> None: + """Set up a mock domain.""" + mock_platform(hass, "some_domain.recorder", platform or MockPlatform()) + assert await async_setup_component(hass, "some_domain", {}) + + def test_converters_align_with_sensor() -> None: """Ensure STATISTIC_UNIT_TO_UNIT_CONVERTER is aligned with UNIT_CONVERTERS.""" for converter in UNIT_CONVERTERS.values(): @@ -293,14 +310,17 @@ def mock_sensor_statistics(): } def get_fake_stats(_hass, session, start, _end): + instance = recorder.get_instance(_hass) return statistics.PlatformCompiledStatistics( [ sensor_stats("sensor.test1", start), sensor_stats("sensor.test2", start), sensor_stats("sensor.test3", start), ], - get_metadata( - _hass, statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"} + get_metadata_with_session( + instance, + session, + statistic_ids={"sensor.test1", "sensor.test2", "sensor.test3"}, ), ) @@ -338,7 +358,7 @@ async def test_compile_periodic_statistics_exception( """Test exception handling when compiling periodic statistics.""" await async_setup_component(hass, "sensor", {}) - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) do_adhoc_statistics(hass, start=now) do_adhoc_statistics(hass, start=now + timedelta(minutes=5)) await async_wait_recording_done(hass) @@ -2468,3 +2488,151 @@ async def test_change_with_none( types={"change"}, ) assert stats == {} + + +async def test_recorder_platform_with_statistics( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test recorder platform.""" + instance = recorder.get_instance(hass) + recorder_data = hass.data["recorder"] + assert not recorder_data.recorder_platforms + + def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: + return 
PlatformCompiledStatistics([], {}) + + def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: + return {} + + def _mock_validate_statistics(*args: Any) -> dict: + return {} + + recorder_platform = Mock( + compile_statistics=Mock(wraps=_mock_compile_statistics), + list_statistic_ids=Mock(wraps=_mock_list_statistic_ids), + validate_statistics=Mock(wraps=_mock_validate_statistics), + ) + + await _setup_mock_domain(hass, recorder_platform) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms == {"some_domain": recorder_platform} + + recorder_platform.compile_statistics.assert_not_called() + recorder_platform.list_statistic_ids.assert_not_called() + recorder_platform.validate_statistics.assert_not_called() + + # Test compile statistics + zero = get_start_time(dt_util.utcnow()) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + + recorder_platform.compile_statistics.assert_called_once_with( + hass, ANY, zero, zero + timedelta(minutes=5) + ) + recorder_platform.list_statistic_ids.assert_not_called() + recorder_platform.validate_statistics.assert_not_called() + + # Test list statistic IDs + await async_list_statistic_ids(hass) + recorder_platform.compile_statistics.assert_called_once() + recorder_platform.list_statistic_ids.assert_called_once_with( + hass, statistic_ids=None, statistic_type=None + ) + recorder_platform.validate_statistics.assert_not_called() + + # Test validate statistics + await instance.async_add_executor_job( + validate_statistics, + hass, + ) + recorder_platform.compile_statistics.assert_called_once() + recorder_platform.list_statistic_ids.assert_called_once() + recorder_platform.validate_statistics.assert_called_once_with(hass) + + +async def test_recorder_platform_without_statistics( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test recorder platform.""" + recorder_data = hass.data["recorder"] + assert recorder_data.recorder_platforms == {} + + await _setup_mock_domain(hass) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms == {} + + +@pytest.mark.parametrize( + "supported_methods", + [ + ("compile_statistics",), + ("list_statistic_ids",), + ("validate_statistics",), + ], +) +async def test_recorder_platform_with_partial_statistics_support( + hass: HomeAssistant, + setup_recorder: None, + caplog: pytest.LogCaptureFixture, + supported_methods: tuple[str, ...], +) -> None: + """Test recorder platform.""" + instance = recorder.get_instance(hass) + recorder_data = hass.data["recorder"] + assert not recorder_data.recorder_platforms + + def _mock_compile_statistics(*args: Any) -> PlatformCompiledStatistics: + return PlatformCompiledStatistics([], {}) + + def _mock_list_statistic_ids(*args: Any, **kwargs: Any) -> dict: + return {} + + def _mock_validate_statistics(*args: Any) -> dict: + return {} + + mock_impl = { + "compile_statistics": _mock_compile_statistics, + "list_statistic_ids": _mock_list_statistic_ids, + "validate_statistics": _mock_validate_statistics, + } + + kwargs = {meth: Mock(wraps=mock_impl[meth]) for meth in supported_methods} + + recorder_platform = Mock( + spec=supported_methods, + **kwargs, + ) + + await _setup_mock_domain(hass, recorder_platform) + + # Wait for the sensor recorder platform to be added + await async_recorder_block_till_done(hass) + assert recorder_data.recorder_platforms 
== {"some_domain": recorder_platform} + + for meth in supported_methods: + getattr(recorder_platform, meth).assert_not_called() + + # Test compile statistics + zero = get_start_time(dt_util.utcnow()) + do_adhoc_statistics(hass, start=zero) + await async_wait_recording_done(hass) + + # Test list statistic IDs + await async_list_statistic_ids(hass) + + # Test validate statistics + await instance.async_add_executor_job( + validate_statistics, + hass, + ) + + for meth in supported_methods: + getattr(recorder_platform, meth).assert_called_once() diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index af784692612..dfa87fc9391 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -15,7 +15,7 @@ from unittest.mock import patch import pytest from homeassistant.components import recorder -from homeassistant.components.recorder import SQLITE_URL_PREFIX, get_instance +from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.util import session_scope from homeassistant.helpers import recorder as recorder_helper from homeassistant.setup import setup_component @@ -34,13 +34,16 @@ SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) -def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +def test_delete_duplicates( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -176,7 +179,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -204,7 +207,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> # Test that the duplicates are removed during migration from schema 23 with get_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -215,15 +218,16 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> assert "Found duplicated" not in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) def test_delete_duplicates_many( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. + """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -359,7 +363,7 @@ def test_delete_duplicates_many( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -393,7 +397,7 @@ def test_delete_duplicates_many( # Test that the duplicates are removed during migration from schema 23 with get_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -405,15 +409,16 @@ def test_delete_duplicates_many( @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00") +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) def test_delete_duplicates_non_identical( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. 
+ """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -519,7 +524,7 @@ def test_delete_duplicates_non_identical( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -543,7 +548,7 @@ def test_delete_duplicates_non_identical( with get_test_home_assistant() as hass: hass.config.config_dir = tmp_path recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) @@ -589,15 +594,16 @@ def test_delete_duplicates_non_identical( ] +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") def test_delete_duplicates_short_term( - caplog: pytest.LogCaptureFixture, tmp_path: Path + recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: - """Test removal of duplicated statistics.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" + """Test removal of duplicated statistics. + The test only works with SQLite. + """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -634,7 +640,7 @@ def test_delete_duplicates_short_term( get_test_home_assistant() as hass, ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) wait_recording_done(hass) wait_recording_done(hass) @@ -657,7 +663,7 @@ def test_delete_duplicates_short_term( with get_test_home_assistant() as hass: hass.config.config_dir = tmp_path recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": dburl}}) + setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) hass.start() wait_recording_done(hass) wait_recording_done(hass) diff --git a/tests/components/recorder/test_system_health.py b/tests/components/recorder/test_system_health.py index fbcefa0b13e..0efaa82e5e5 100644 --- a/tests/components/recorder/test_system_health.py +++ b/tests/components/recorder/test_system_health.py @@ -15,13 +15,15 @@ from tests.common import get_system_health_info from tests.typing import RecorderInstanceGenerator +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health( recorder_mock: Recorder, hass: HomeAssistant, recorder_db_url: str ) -> None: - """Test recorder system health.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test recorder system health. + + This test is specific for SQLite. 
+ """ assert await async_setup_component(hass, "system_health", {}) await async_wait_recording_done(hass) @@ -100,15 +102,17 @@ async def test_recorder_system_health_db_url_missing_host( } +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_recorder_system_health_crashed_recorder_runs_table( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, recorder_db_url: str, ) -> None: - """Test recorder system health with crashed recorder runs table.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test recorder system health with crashed recorder runs table. + + This test is specific for SQLite. + """ with patch( "homeassistant.components.recorder.table_managers.recorder_runs.RecorderRunsManager.load_from_db" diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index d72978c57bb..04fe762c780 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -26,6 +26,8 @@ from homeassistant.components.recorder.models import ( process_timestamp, ) from homeassistant.components.recorder.util import ( + MIN_VERSION_SQLITE, + UPCOMING_MIN_VERSION_SQLITE, end_incomplete_runs, is_second_sunday, resolve_period, @@ -48,7 +50,7 @@ from tests.typing import RecorderInstanceGenerator @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder.""" @@ -116,12 +118,18 @@ def test_validate_or_move_away_sqlite_database( assert util.validate_or_move_away_sqlite_database(dburl) is True +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_last_run_was_recently_clean( - async_setup_recorder_instance: RecorderInstanceGenerator, tmp_path: Path + async_setup_recorder_instance: RecorderInstanceGenerator, ) -> None: - """Test we can check if the last recorder run was recently clean.""" + """Test we can check if the last recorder run was recently clean. + + This is only implemented for SQLite. 
+ """ config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"), recorder.CONF_COMMIT_INTERVAL: 1, } async with async_test_home_assistant() as hass: @@ -217,9 +225,9 @@ def test_setup_connection_for_dialect_mysql(mysql_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) -def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: +def test_setup_connection_for_dialect_sqlite(sqlite_version: str) -> None: """Test setting up the connection for a sqlite dialect.""" instance_mock = MagicMock() execute_args = [] @@ -270,10 +278,10 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) def test_setup_connection_for_dialect_sqlite_zero_commit_interval( - sqlite_version, + sqlite_version: str, ) -> None: """Test setting up the connection for a sqlite dialect with a zero commit interval.""" instance_mock = MagicMock(commit_interval=0) @@ -497,10 +505,6 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non "2.0.0", "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.31.0.", ), - ( - "dogs", - "Version dogs of SQLite is not supported; minimum supported version is 3.31.0.", - ), ], ) def test_fail_outdated_sqlite( @@ -719,14 +723,72 @@ async def test_no_issue_for_mariadb_with_MDEV_25020( assert database_engine.optimizer.slow_range_in_select is False +async def test_issue_for_old_sqlite( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create and delete an issue for old sqlite versions.""" + instance_mock = MagicMock() + instance_mock.hass = hass + execute_args = [] + close_mock = MagicMock() + min_version = str(MIN_VERSION_SQLITE) + + def execute_mock(statement): + nonlocal execute_args + execute_args.append(statement) + + def fetchall_mock(): + nonlocal execute_args + if execute_args[-1] == "SELECT sqlite_version()": + return [[min_version]] + return None + + def _make_cursor_mock(*_): + return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) + + dbapi_connection = MagicMock(cursor=_make_cursor_mock) + + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is not None + assert issue.translation_placeholders == { + "min_version": str(UPCOMING_MIN_VERSION_SQLITE), + "server_version": min_version, + } + + min_version = str(UPCOMING_MIN_VERSION_SQLITE) + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is None + assert database_engine is not None + + +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_basic_sanity_check( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test the basic sanity checks with a missing table.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test the basic sanity checks with a missing table. + This test is specific for SQLite. 
+ """ cursor = util.get_instance(hass).engine.raw_connection().cursor() assert util.basic_sanity_check(cursor) is True @@ -737,17 +799,18 @@ async def test_basic_sanity_check( util.basic_sanity_check(cursor) +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_combined_checks( hass: HomeAssistant, setup_recorder: None, caplog: pytest.LogCaptureFixture, recorder_db_url: str, ) -> None: - """Run Checks on the open database.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Run Checks on the open database. + This test is specific for SQLite. + """ instance = util.get_instance(hass) instance.db_retry_wait = 0 @@ -829,14 +892,15 @@ async def test_end_incomplete_runs( assert "Ended unfinished session" in caplog.text +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") async def test_periodic_db_cleanups( hass: HomeAssistant, setup_recorder: None, recorder_db_url: str ) -> None: - """Test periodic db cleanups.""" - if recorder_db_url.startswith(("mysql://", "postgresql://")): - # This test is specific for SQLite - return + """Test periodic db cleanups. + This test is specific for SQLite. + """ with patch.object(util.get_instance(hass).engine, "connect") as connect_mock: util.periodic_db_cleanups(util.get_instance(hass)) @@ -847,17 +911,22 @@ async def test_periodic_db_cleanups( assert str(text_obj) == "PRAGMA wal_checkpoint(TRUNCATE);" +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.parametrize("persistent_database", [True]) async def test_write_lock_db( async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant, - tmp_path: Path, + recorder_db_url: str, ) -> None: - """Test database write lock.""" + """Test database write lock. - # Use file DB, in memory DB cannot do write locks. - config = { - recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db?timeout=0.1") - } + This is only supported for SQLite. + + Use file DB, in memory DB cannot do write locks. 
+ """ + + config = {recorder.CONF_DB_URL: recorder_db_url + "?timeout=0.1"} instance = await async_setup_recorder_instance(hass, config) await hass.async_block_till_done() diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index e3398fbf0e3..9956fec8a09 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -2,7 +2,6 @@ from datetime import timedelta import importlib -from pathlib import Path import sys from unittest.mock import patch @@ -11,17 +10,17 @@ from sqlalchemy import create_engine, inspect from sqlalchemy.orm import Session from homeassistant.components import recorder -from homeassistant.components.recorder import SQLITE_URL_PREFIX, core, statistics +from homeassistant.components.recorder import core, migration, statistics from homeassistant.components.recorder.queries import select_event_type_ids from homeassistant.components.recorder.util import session_scope -from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import async_setup_component +from homeassistant.const import EVENT_STATE_CHANGED +from homeassistant.core import Event, EventOrigin, State import homeassistant.util.dt as dt_util from .common import async_wait_recording_done from tests.common import async_test_home_assistant +from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" SCHEMA_MODULE = "tests.components.recorder.db_schema_32" @@ -49,13 +48,16 @@ def _create_engine_test(*args, **kwargs): return engine -async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None: +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_times( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: """Test we can migrate times.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() @@ -94,37 +96,26 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventsContextIDMigration, "migrate_data"), + patch.object(migration.StatesContextIDMigration, "migrate_data"), + patch.object(migration.EventTypeIDMigration, "migrate_data"), + patch.object(migration.EntityIDMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch( - 
"homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -134,15 +125,15 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await recorder.get_instance(hass).async_add_executor_job(_add_data) + await instance.async_add_executor_job(_add_data) await hass.async_block_till_done() - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() - states_indexes = await recorder.get_instance(hass).async_add_executor_job( + states_indexes = await instance.async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert recorder.get_instance(hass).use_legacy_events_index is True + assert instance.use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -150,17 +141,16 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - assert "ix_states_event_id" in states_index_names # Test that the duplicates are removed during migration from schema 23 - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. 
for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() await async_wait_recording_done(hass) def _get_test_data_from_db(): @@ -184,9 +174,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - session.expunge_all() return events_result, states_result - events_result, states_result = await recorder.get_instance( - hass - ).async_add_executor_job(_get_test_data_from_db) + events_result, states_result = await instance.async_add_executor_job( + _get_test_data_from_db + ) assert len(events_result) == 1 assert events_result[0].time_fired_ts == now_timestamp @@ -198,37 +188,32 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - with session_scope(hass=hass) as session: return inspect(session.connection()).get_indexes("events") - events_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_events_index_names - ) + events_indexes = await instance.async_add_executor_job(_get_events_index_names) events_index_names = {index["name"] for index in events_indexes} assert "ix_events_context_id_bin" in events_index_names assert "ix_events_context_id" not in events_index_names - states_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_states_index_names - ) + states_indexes = await instance.async_add_executor_job(_get_states_index_names) states_index_names = {index["name"] for index in states_indexes} # sqlite does not support dropping foreign keys so we had to # create a new table and copy the data over assert "ix_states_event_id" not in states_index_names - assert recorder.get_instance(hass).use_legacy_events_index is False + assert instance.use_legacy_events_index is False await hass.async_stop() +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( - caplog: pytest.LogCaptureFixture, tmp_path: Path + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, ) -> None: """Test we resume the entity id post migration after a restart.""" - test_dir = tmp_path.joinpath("sqlite") - test_dir.mkdir() - test_db_file = test_dir.joinpath("test_run_info.db") - dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}" - importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] now = dt_util.utcnow() @@ -273,28 +258,15 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test), - patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", - ), - patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", - ), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( - "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids" + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" ), ): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async 
with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() await async_wait_recording_done(hass) await async_wait_recording_done(hass) @@ -304,15 +276,15 @@ async def test_migrate_can_resume_entity_id_post_migration( session.add(old_db_schema.Events.from_event(custom_event)) session.add(old_db_schema.States.from_event(state_changed_event)) - await recorder.get_instance(hass).async_add_executor_job(_add_data) + await instance.async_add_executor_job(_add_data) await hass.async_block_till_done() - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() - states_indexes = await recorder.get_instance(hass).async_add_executor_job( + states_indexes = await instance.async_add_executor_job( _get_states_index_names ) states_index_names = {index["name"] for index in states_indexes} - assert recorder.get_instance(hass).use_legacy_events_index is True + assert instance.use_legacy_events_index is True await hass.async_stop() await hass.async_block_till_done() @@ -320,44 +292,155 @@ async def test_migrate_can_resume_entity_id_post_migration( assert "ix_states_event_id" in states_index_names assert "ix_states_entity_id_last_updated_ts" in states_index_names - with patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"): - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) - await hass.async_block_till_done() - - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() - await async_wait_recording_done(hass) - - states_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - await hass.async_stop() - await hass.async_block_till_done() - - async with async_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, "recorder", {"recorder": {"db_url": dburl}} - ) + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): await hass.async_block_till_done() # We need to wait for all the migration tasks to complete # before we can check the database. for _ in range(number_of_migrations): - await recorder.get_instance(hass).async_block_till_done() + await instance.async_block_till_done() await async_wait_recording_done(hass) - states_indexes = await recorder.get_instance(hass).async_add_executor_job( - _get_states_index_names - ) + states_indexes = await instance.async_add_executor_job(_get_states_index_names) states_index_names = {index["name"] for index in states_indexes} assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + + await hass.async_stop() + + +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_can_resume_ix_states_event_id_removed( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test we resume the entity id post migration after a restart. 
+ + This case tests the migration still happens if + ix_states_event_id is removed from the states table. + """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None await hass.async_stop() diff --git a/tests/components/recorder/test_websocket_api.py b/tests/components/recorder/test_websocket_api.py index cc187a1e6ad..8efbf226bc1 100644 --- a/tests/components/recorder/test_websocket_api.py +++ b/tests/components/recorder/test_websocket_api.py @@ -3,7 +3,7 @@ import datetime from datetime import timedelta from statistics import fmean -import threading +import sys from unittest.mock import ANY, patch from freezegun import freeze_time @@ -35,11 +35,21 @@ from .common import ( async_wait_recording_done, create_engine_test, do_adhoc_statistics, + get_start_time, statistics_during_period, ) +from .conftest import InstrumentedMigration from tests.common import async_fire_time_changed -from tests.typing import WebSocketGenerator +from tests.typing import RecorderInstanceGenerator, WebSocketGenerator + + +@pytest.fixture +async def mock_recorder_before_hass( + async_setup_recorder_instance: RecorderInstanceGenerator, +) -> None: + """Set up recorder.""" + DISTANCE_SENSOR_FT_ATTRIBUTES = { "device_class": "distance", @@ -146,12 +156,17 @@ async def test_statistics_during_period( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", 10, attributes=POWER_SENSOR_KW_ATTRIBUTES) + hass.states.async_set( + "sensor.test", + 10, + attributes=POWER_SENSOR_KW_ATTRIBUTES, + timestamp=now.timestamp(), + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -599,7 +614,12 @@ async def test_statistic_during_period( } # Test we can automatically convert units - hass.states.async_set("sensor.test", None, attributes=ENERGY_SENSOR_WH_ATTRIBUTES) + hass.states.async_set( + "sensor.test", + None, + attributes=ENERGY_SENSOR_WH_ATTRIBUTES, + timestamp=now.timestamp(), + ) await client.send_json_auto_id( { "type": "recorder/statistic_during_period", @@ -810,7 +830,7 @@ async def test_statistic_during_period_partial_overlap( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, freezer: FrozenDateTimeFactory, - frozen_time: datetime, + frozen_time: datetime.datetime, ) -> None: """Test statistic_during_period.""" client = await hass_ws_client() @@ -1256,11 +1276,13 @@ async def test_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1341,12 +1363,16 @@ async def 
test_sum_statistics_during_period_unit_conversion( converted_value, ) -> None: """Test statistics_during_period.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", 0, attributes=attributes) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", 0, attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1462,7 +1488,7 @@ async def test_statistics_during_period_in_the_past( ) -> None: """Test statistics_during_period in the past.""" await hass.config.async_set_time_zone("UTC") - now = dt_util.utcnow().replace() + now = get_start_time(dt_util.utcnow()) hass.config.units = US_CUSTOMARY_SYSTEM await async_setup_component(hass, "sensor", {}) @@ -1717,7 +1743,7 @@ async def test_list_statistic_ids( unit_class, ) -> None: """Test list_statistic_ids.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1731,7 +1757,9 @@ async def test_list_statistic_ids( assert response["success"] assert response["result"] == [] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) @@ -1881,7 +1909,7 @@ async def test_list_statistic_ids_unit_change( unit_class, ) -> None: """Test list_statistic_ids.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -1894,7 +1922,9 @@ async def test_list_statistic_ids_unit_change( assert response["success"] assert response["result"] == [] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -1917,7 +1947,9 @@ async def test_list_statistic_ids_unit_change( ] # Change the state unit - hass.states.async_set("sensor.test", 10, attributes=attributes2) + hass.states.async_set( + "sensor.test", 10, attributes=attributes2, timestamp=now.timestamp() + ) await client.send_json_auto_id({"type": "recorder/list_statistic_ids"}) response = await client.receive_json() @@ -1956,7 +1988,7 @@ async def test_clear_statistics( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test removing statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES @@ -1966,9 +1998,15 @@ async def test_clear_statistics( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test1", state, attributes=attributes) - hass.states.async_set("sensor.test2", state * 2, attributes=attributes) - hass.states.async_set("sensor.test3", state * 3, attributes=attributes) + hass.states.async_set( + "sensor.test1", state, attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test2", state * 2, 
attributes=attributes, timestamp=now.timestamp() + ) + hass.states.async_set( + "sensor.test3", state * 3, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, start=now) @@ -2079,7 +2117,7 @@ async def test_update_statistics_metadata( new_display_unit, ) -> None: """Test removing statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2088,7 +2126,9 @@ async def test_update_statistics_metadata( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2168,7 +2208,7 @@ async def test_change_statistics_unit( recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test change unit of recorded statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2177,7 +2217,9 @@ async def test_change_statistics_unit( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2313,7 +2355,7 @@ async def test_change_statistics_unit_errors( caplog: pytest.LogCaptureFixture, ) -> None: """Test change unit of recorded statistics.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) units = METRIC_SYSTEM attributes = POWER_SENSOR_KW_ATTRIBUTES | {"device_class": None} @@ -2367,7 +2409,9 @@ async def test_change_statistics_unit_errors( hass.config.units = units await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) - hass.states.async_set("sensor.test", state, attributes=attributes) + hass.states.async_set( + "sensor.test", state, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) do_adhoc_statistics(hass, period="hourly", start=now) @@ -2457,7 +2501,7 @@ async def test_recorder_info_bad_recorder_config( client = await hass_ws_client() - with patch("homeassistant.components.recorder.migration.migrate_schema"): + with patch("homeassistant.components.recorder.migration._migrate_schema"): recorder_helper.async_initialize_recorder(hass) assert not await async_setup_component( hass, recorder.DOMAIN, {recorder.DOMAIN: config} @@ -2482,7 +2526,7 @@ async def test_recorder_info_no_instance( client = await hass_ws_client() with patch( - "homeassistant.components.recorder.websocket_api.get_instance", + "homeassistant.components.recorder.basic_websocket_api.get_instance", return_value=None, ): await client.send_json_auto_id({"type": "recorder/info"}) @@ -2493,70 +2537,60 @@ async def test_recorder_info_no_instance( async def test_recorder_info_migration_queue_exhausted( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + async_test_recorder: RecorderInstanceGenerator, + instrument_migration: 
InstrumentedMigration, ) -> None: """Test getting recorder status when recorder queue is exhausted.""" assert recorder.util.async_migration_in_progress(hass) is False - migration_done = threading.Event() - - real_migration = recorder.migration._apply_update - - def stalled_migration(*args): - """Make migration stall.""" - nonlocal migration_done - migration_done.wait() - return real_migration(*args) - with ( - patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), - patch("homeassistant.components.recorder.Recorder.async_periodic_statistics"), patch( "homeassistant.components.recorder.core.create_engine", new=create_engine_test, ), patch.object(recorder.core, "MAX_QUEUE_BACKLOG_MIN_VALUE", 1), - patch.object(recorder.core, "QUEUE_PERCENTAGE_ALLOWED_AVAILABLE_MEMORY", 0), - patch( - "homeassistant.components.recorder.migration._apply_update", - wraps=stalled_migration, + patch.object( + recorder.core, "MIN_AVAILABLE_MEMORY_FOR_QUEUE_BACKLOG", sys.maxsize ), ): - recorder_helper.async_initialize_recorder(hass) - hass.create_task( - async_setup_component( - hass, "recorder", {"recorder": {"db_url": "sqlite://"}} + async with async_test_recorder( + hass, wait_recorder=False, wait_recorder_setup=False + ): + await hass.async_add_executor_job( + instrument_migration.migration_started.wait ) - ) - await recorder_helper.async_wait_recorder(hass) - hass.states.async_set("my.entity", "on", {}) - await hass.async_block_till_done() + assert recorder.util.async_migration_in_progress(hass) is True + await recorder_helper.async_wait_recorder(hass) + hass.states.async_set("my.entity", "on", {}) + await hass.async_block_till_done() - # Detect queue full - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) - await hass.async_block_till_done() + # Detect queue full + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(hours=2)) + await hass.async_block_till_done() - client = await hass_ws_client() + client = await hass_ws_client() - # Check the status - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is True - assert response["result"]["recording"] is False - assert response["result"]["thread_running"] is True + # Check the status + await client.send_json_auto_id({"type": "recorder/info"}) + response = await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is True + assert response["result"]["recording"] is False + assert response["result"]["thread_running"] is True - # Let migration finish - migration_done.set() - await async_wait_recording_done(hass) + # Let migration finish + instrument_migration.migration_stall.set() + await async_wait_recording_done(hass) - # Check the status after migration finished - await client.send_json_auto_id({"type": "recorder/info"}) - response = await client.receive_json() - assert response["success"] - assert response["result"]["migration_in_progress"] is False - assert response["result"]["recording"] is True - assert response["result"]["thread_running"] is True + # Check the status after migration finished + await client.send_json_auto_id({"type": "recorder/info"}) + response = await client.receive_json() + assert response["success"] + assert response["result"]["migration_in_progress"] is False + assert response["result"]["recording"] is True + assert response["result"]["thread_running"] is True async def test_backup_start_no_recorder( @@ -2602,7 
+2636,7 @@ async def test_get_statistics_metadata( unit_class, ) -> None: """Test get_statistics_metadata.""" - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) has_mean = attributes["state_class"] == "measurement" has_sum = not has_mean @@ -2681,10 +2715,14 @@ async def test_get_statistics_metadata( } ] - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) - hass.states.async_set("sensor.test2", 10, attributes=attributes) + hass.states.async_set( + "sensor.test2", 10, attributes=attributes, timestamp=now.timestamp() + ) await async_wait_recording_done(hass) await client.send_json_auto_id( diff --git a/tests/components/refoss/conftest.py b/tests/components/refoss/conftest.py index 80b3f4d8b75..5ded3e9489d 100644 --- a/tests/components/refoss/conftest.py +++ b/tests/components/refoss/conftest.py @@ -1,9 +1,9 @@ """Pytest module configuration.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/remote/test_device_action.py b/tests/components/remote/test_device_action.py index a6e890937b5..e224fcf4939 100644 --- a/tests/components/remote/test_device_action.py +++ b/tests/components/remote/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.remote import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,12 +24,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -114,7 +108,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -189,7 +182,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/remote/test_device_condition.py b/tests/components/remote/test_device_condition.py index d13a0480355..6c9334aeac4 100644 --- a/tests/components/remote/test_device_condition.py +++ b/tests/components/remote/test_device_condition.py @@ -20,7 +20,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> 
list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -183,7 +176,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -249,20 +242,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -270,7 +263,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -315,13 +308,13 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -329,7 +322,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -378,26 +371,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/remote/test_device_trigger.py 
b/tests/components/remote/test_device_trigger.py index 8a1a0c318d7..c647faba2c1 100644 --- a/tests/components/remote/test_device_trigger.py +++ b/tests/components/remote/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -181,7 +174,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -267,20 +260,20 @@ async def test_if_fires_on_state_change( ] }, ) - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -291,7 +284,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -335,13 +328,13 @@ async def test_if_fires_on_state_change_legacy( ] }, ) - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -351,7 +344,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -397,16 +390,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert 
len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/renault/conftest.py b/tests/components/renault/conftest.py index a5af01b504a..9be41eb7ba0 100644 --- a/tests/components/renault/conftest.py +++ b/tests/components/renault/conftest.py @@ -1,5 +1,6 @@ """Provide common Renault fixtures.""" +from collections.abc import Generator, Iterator import contextlib from types import MappingProxyType from typing import Any @@ -8,7 +9,6 @@ from unittest.mock import AsyncMock, patch import pytest from renault_api.kamereon import exceptions, schemas from renault_api.renault_account import RenaultAccount -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry @@ -200,7 +200,7 @@ def patch_fixtures_with_no_data(): @contextlib.contextmanager -def _patch_fixtures_with_side_effect(side_effect: Any): +def _patch_fixtures_with_side_effect(side_effect: Any) -> Iterator[None]: """Mock fixtures.""" with ( patch( diff --git a/tests/components/renault/snapshots/test_binary_sensor.ambr b/tests/components/renault/snapshots/test_binary_sensor.ambr index 8f49d7ef761..9dac0c323ce 100644 --- a/tests/components/renault/snapshots/test_binary_sensor.ambr +++ b/tests/components/renault/snapshots/test_binary_sensor.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -321,6 +322,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -706,6 +708,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -875,6 +878,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1302,6 +1306,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1601,6 +1606,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1986,6 +1992,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -2155,6 +2162,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/renault/snapshots/test_button.ambr b/tests/components/renault/snapshots/test_button.ambr index 7fa37319b2e..c4732ad1458 100644 --- a/tests/components/renault/snapshots/test_button.ambr +++ b/tests/components/renault/snapshots/test_button.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -105,6 +106,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -272,6 +274,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -439,6 
+442,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -606,6 +610,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -689,6 +694,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -856,6 +862,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1023,6 +1030,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/renault/snapshots/test_device_tracker.ambr b/tests/components/renault/snapshots/test_device_tracker.ambr index 61232d0268d..5e7813316a2 100644 --- a/tests/components/renault/snapshots/test_device_tracker.ambr +++ b/tests/components/renault/snapshots/test_device_tracker.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -106,6 +107,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -190,6 +192,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -231,6 +234,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -315,6 +319,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -402,6 +407,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -489,6 +495,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -530,6 +537,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index 30181fd3b9c..ccdc76f0130 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -63,6 +64,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -159,6 +161,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -255,6 +258,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -351,6 +355,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -392,6 +397,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 
'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -488,6 +494,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -584,6 +591,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index 1ae033101d4..e4bb2d74297 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -331,6 +332,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1085,6 +1087,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -1835,6 +1838,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -2628,6 +2632,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -2937,6 +2942,7 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -3691,6 +3697,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , @@ -4441,6 +4448,7 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', + 'model_id': None, 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/renault/test_binary_sensor.py b/tests/components/renault/test_binary_sensor.py index a0264493544..52b6de33f14 100644 --- a/tests/components/renault/test_binary_sensor.py +++ b/tests/components/renault/test_binary_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault binary sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_button.py b/tests/components/renault/test_button.py index bed188d8881..32c5ce651ae 100644 --- a/tests/components/renault/test_button.py +++ b/tests/components/renault/test_button.py @@ -1,11 +1,11 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/renault/test_device_tracker.py b/tests/components/renault/test_device_tracker.py index d8bee097eda..39f37d12a4d 100644 --- a/tests/components/renault/test_device_tracker.py +++ b/tests/components/renault/test_device_tracker.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from 
syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_init.py b/tests/components/renault/test_init.py index 90963fd3521..0f9d9cbaf5b 100644 --- a/tests/components/renault/test_init.py +++ b/tests/components/renault/test_init.py @@ -1,12 +1,12 @@ """Tests for Renault setup process.""" +from collections.abc import Generator from typing import Any from unittest.mock import Mock, patch import aiohttp import pytest from renault_api.gigya.exceptions import GigyaException, InvalidCredentialsException -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/renault/test_select.py b/tests/components/renault/test_select.py index 0577966d514..7b589d86863 100644 --- a/tests/components/renault/test_select.py +++ b/tests/components/renault/test_select.py @@ -1,11 +1,11 @@ """Tests for Renault selects.""" +from collections.abc import Generator from unittest.mock import patch import pytest from renault_api.kamereon import schemas from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.select import ( ATTR_OPTION, diff --git a/tests/components/renault/test_sensor.py b/tests/components/renault/test_sensor.py index 7e8e4f24c77..d69ab5c0b7f 100644 --- a/tests/components/renault/test_sensor.py +++ b/tests/components/renault/test_sensor.py @@ -1,10 +1,10 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index d30626e4117..4e3460b9afa 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -1,5 +1,6 @@ """Tests for Renault sensors.""" +from collections.abc import Generator from datetime import datetime from unittest.mock import patch @@ -7,7 +8,6 @@ import pytest from renault_api.exceptions import RenaultException from renault_api.kamereon import schemas from renault_api.kamereon.models import ChargeSchedule -from typing_extensions import Generator from homeassistant.components.renault.const import DOMAIN from homeassistant.components.renault.services import ( diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 105815bae1d..c74cac76192 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -1,9 +1,9 @@ """Setup the Reolink tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL @@ -35,6 +35,7 @@ TEST_NVR_NAME = "test_reolink_name" TEST_NVR_NAME2 = "test2_reolink_name" TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" +TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" @@ -83,6 +84,7 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.sw_version = "v1.0.0.0.0.0000" host_mock.manufacturer = "Reolink" host_mock.model = TEST_HOST_MODEL 
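Aside: the conftest and test modules touched in this stretch repeatedly swap "from typing_extensions import Generator" for the standard-library "collections.abc.Generator" when annotating fixtures. A minimal sketch of the resulting fixture shape, with purely illustrative names (not taken from the patch), could look like this:

from collections.abc import Generator
from unittest.mock import MagicMock, patch

import pytest


@pytest.fixture
def mock_dumps() -> Generator[MagicMock]:
    """Yield a patched json.dumps; illustrative fixture only."""
    # Same pattern as the conftest fixtures above: patch inside a with-block
    # and yield the mock so it is undone automatically after the test.
    with patch("json.dumps") as mocked:
        yield mocked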
+ host_mock.item_number = TEST_ITEM_NUMBER host_mock.camera_model.return_value = TEST_CAM_MODEL host_mock.camera_name.return_value = TEST_NVR_NAME host_mock.camera_hardware_version.return_value = "IPC_00001" diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index ba845dc1697..6e57a7924e7 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -166,8 +166,23 @@ async def test_config_flow_errors( assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" - assert result["errors"] == {CONF_HOST: "invalid_auth"} + assert result["errors"] == {CONF_PASSWORD: "invalid_auth"} + reolink_connect.valid_password.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_HOST: TEST_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {CONF_PASSWORD: "password_incompatible"} + + reolink_connect.valid_password.return_value = True reolink_connect.get_host_data.side_effect = ApiError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index f70fd312051..4f745530b6b 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -17,7 +17,7 @@ from homeassistant.components.reolink import ( from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform -from homeassistant.core import DOMAIN as HA_DOMAIN, HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.helpers import ( device_registry as dr, entity_registry as er, @@ -143,13 +143,13 @@ async def test_credential_error_three( issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): - assert (HA_DOMAIN, issue_id) not in issue_registry.issues + assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues async_fire_time_changed( hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) ) await hass.async_block_till_done() - assert (HA_DOMAIN, issue_id) in issue_registry.issues + assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues async def test_entry_reloading( @@ -282,6 +282,15 @@ async def test_removing_disconnected_cams( True, False, ), + ( + f"{TEST_MAC}_chime123456789_play_ringtone", + f"{TEST_UID}_chime123456789_play_ringtone", + f"{TEST_MAC}_chime123456789", + f"{TEST_UID}_chime123456789", + Platform.SELECT, + True, + False, + ), ( f"{TEST_MAC}_0_record_audio", f"{TEST_MAC}_{TEST_UID_CAM}_record_audio", diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py new file mode 100644 index 00000000000..53c1e494b3d --- /dev/null +++ b/tests/components/reolink/test_select.py @@ -0,0 +1,167 @@ +"""Test the Reolink select platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN 
+from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_SELECT_OPTION, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_floodlight_mode_select( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select entity with floodlight_mode.""" + reolink_connect.whiteled_mode.return_value = 1 + reolink_connect.whiteled_mode_list.return_value = ["off", "auto"] + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" + assert hass.states.is_state(entity_id, "auto") + + reolink_connect.set_whiteled = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_called_once() + + reolink_connect.set_whiteled = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + reolink_connect.set_whiteled = AsyncMock( + side_effect=InvalidParameterError("Test error") + ) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + +async def test_play_quick_reply_message( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select play_quick_reply_message entity.""" + reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" + assert hass.states.is_state(entity_id, STATE_UNKNOWN) + + reolink_connect.play_quick_reply = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "test message"}, + blocking=True, + ) + reolink_connect.play_quick_reply.assert_called_once() + + +async def test_chime_select( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime select entity.""" + TEST_CHIME = Chime( + host=reolink_connect, + dev_id=12345678, + channel=0, + ) + TEST_CHIME.name = "Test chime" + TEST_CHIME.volume = 3 + TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } + + reolink_connect.chime_list = [TEST_CHIME] + 
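Aside: the Chime stub assembled just above is what drives the new per-chime select entities; its event_info["visitor"]["musicId"] value of 2 is what the test later expects to surface as the "pianokey" ringtone option. A stripped-down sketch of the same wiring, using a plain MagicMock in place of the real reolink_aio Chime (names are illustrative, not part of the patch):

from unittest.mock import MagicMock

# Stand-ins for the mocked Reolink host and one attached chime.
host = MagicMock()
chime = MagicMock()
chime.name = "Test chime"
chime.led_state = True
# Per-event ringtone selection; musicId 2 is the value asserted as "pianokey"
# later in test_chime_select.
chime.event_info = {"visitor": {"switch": 1, "musicId": 2}}
host.chime_list = [chime]  # the test exposes chimes to the integration via this list

assert host.chime_list[0].event_info["visitor"]["musicId"] == 2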
+ with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + assert hass.states.is_state(entity_id, "pianokey") + + TEST_CHIME.set_tone = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + TEST_CHIME.set_tone.assert_called_once() + + TEST_CHIME.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + TEST_CHIME.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + TEST_CHIME.event_info = {} + async_fire_time_changed( + hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) + ) + await hass.async_block_till_done() + + assert hass.states.is_state(entity_id, STATE_UNKNOWN) diff --git a/tests/components/reolink/test_switch.py b/tests/components/reolink/test_switch.py new file mode 100644 index 00000000000..ebf805b593d --- /dev/null +++ b/tests/components/reolink/test_switch.py @@ -0,0 +1,81 @@ +"""Test the Reolink switch platform.""" + +from unittest.mock import MagicMock, patch + +from homeassistant.components.reolink import const +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir + +from .conftest import TEST_UID + +from tests.common import MockConfigEntry + + +async def test_cleanup_hdr_switch_( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test cleanup of the HDR switch entity.""" + original_id = f"{TEST_UID}_hdr" + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.supported.return_value = True + + entity_registry.async_get_or_create( + domain=domain, + platform=const.DOMAIN, + unique_id=original_id, + config_entry=config_entry, + suggested_object_id=original_id, + disabled_by=er.RegistryEntryDisabler.USER, + ) + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None + ) + + +async def test_hdr_switch_deprecated_repair_issue( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test repairs issue is raised when hdr switch entity used.""" + original_id = f"{TEST_UID}_hdr" + domain = Platform.SWITCH + + reolink_connect.channels = [0] + reolink_connect.supported.return_value = True + + entity_registry.async_get_or_create( + domain=domain, + platform=const.DOMAIN, + unique_id=original_id, + config_entry=config_entry, + 
suggested_object_id=original_id, + disabled_by=None, + ) + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + # setup CH 0 and host entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [domain]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + + assert (const.DOMAIN, "hdr_switch_deprecated") in issue_registry.issues diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index 60d0364b985..dcc6932cf4a 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -115,7 +115,7 @@ class MockFixFlowAbort(RepairsFlow): @pytest.fixture(autouse=True) -async def mock_repairs_integration(hass): +async def mock_repairs_integration(hass: HomeAssistant) -> None: """Mock a repairs integration.""" hass.config.components.add("fake_integration") diff --git a/tests/components/rest/test_init.py b/tests/components/rest/test_init.py index 0fda89cc329..02dfe6364ff 100644 --- a/tests/components/rest/test_init.py +++ b/tests/components/rest/test_init.py @@ -16,7 +16,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, UnitOfInformation, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -468,7 +468,7 @@ async def test_config_schema_via_packages(hass: HomeAssistant) -> None: "pack_11": {"rest": {"resource": "http://url1"}}, "pack_list": {"rest": [{"resource": "http://url2"}]}, } - config = {hass_config.HA_DOMAIN: {hass_config.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {hass_config.CONF_PACKAGES: packages}} await hass_config.merge_packages_config(hass, config, packages) assert len(config) == 2 diff --git a/tests/components/rflink/test_binary_sensor.py b/tests/components/rflink/test_binary_sensor.py index c92eaa30fe8..9329edb3a00 100644 --- a/tests/components/rflink/test_binary_sensor.py +++ b/tests/components/rflink/test_binary_sensor.py @@ -7,6 +7,7 @@ automatic sensor creation. 
from datetime import timedelta from freezegun import freeze_time +import pytest from homeassistant.components.rflink import CONF_RECONNECT_INTERVAL from homeassistant.const import ( @@ -45,7 +46,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -84,7 +87,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("binary_sensor.test").state == STATE_OFF -async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: +async def test_entity_availability( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -125,7 +130,7 @@ async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("binary_sensor.test").state == STATE_ON -async def test_off_delay(hass: HomeAssistant, monkeypatch) -> None: +async def test_off_delay(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: """Test off_delay option.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -188,7 +193,9 @@ async def test_off_delay(hass: HomeAssistant, monkeypatch) -> None: assert len(events) == 3 -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" mock_restore_cache( hass, (State(f"{DOMAIN}.test", STATE_ON), State(f"{DOMAIN}.test2", STATE_ON)) diff --git a/tests/components/rflink/test_cover.py b/tests/components/rflink/test_cover.py index 0829fddef51..0f14e76620f 100644 --- a/tests/components/rflink/test_cover.py +++ b/tests/components/rflink/test_cover.py @@ -5,6 +5,8 @@ control of RFLink cover devices. 
""" +import pytest + from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, @@ -37,7 +39,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the RFLink cover component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -107,7 +111,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[1][0][1] == "UP" -async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: +async def test_firing_bus_event( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -142,7 +148,9 @@ async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == {"state": "down", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -180,7 +188,9 @@ async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_count == 5 -async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_alternation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Simultaneously switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -211,7 +221,9 @@ async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_cancelling( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -240,7 +252,9 @@ async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) - assert protocol.send_command_ack.call_args_list[3][0][1] == "UP" -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -270,7 +284,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -303,7 +319,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: 
pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -331,7 +349,9 @@ async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == STATE_OPEN -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -377,7 +397,9 @@ async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: # The code checks the ID, it will use the # 'inverted' class when the name starts with # 'newkaku' -async def test_inverted_cover(hass: HomeAssistant, monkeypatch) -> None: +async def test_inverted_cover( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_init.py b/tests/components/rflink/test_init.py index f901e46aea1..1caae302748 100644 --- a/tests/components/rflink/test_init.py +++ b/tests/components/rflink/test_init.py @@ -5,7 +5,6 @@ from unittest.mock import Mock import pytest from voluptuous.error import MultipleInvalid -from homeassistant.bootstrap import async_setup_component from homeassistant.components.rflink import ( CONF_KEEPALIVE_IDLE, CONF_RECONNECT_INTERVAL, @@ -28,10 +27,16 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component async def mock_rflink( - hass, config, domain, monkeypatch, failures=None, failcommand=False + hass: HomeAssistant, + config, + domain, + monkeypatch: pytest.MonkeyPatch, + failures=None, + failcommand=False, ): """Create mock RFLink asyncio protocol, test component setup.""" transport, protocol = (Mock(), Mock()) @@ -77,7 +82,9 @@ async def mock_rflink( return event_callback, mock_create, protocol, disconnect_callback -async def test_version_banner(hass: HomeAssistant, monkeypatch) -> None: +async def test_version_banner( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test sending unknown commands doesn't cause issues.""" # use sensor domain during testing main platform domain = "sensor" @@ -102,7 +109,9 @@ async def test_version_banner(hass: HomeAssistant, monkeypatch) -> None: ) -async def test_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_no_wait( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test command sending without ack.""" domain = "switch" config = { @@ -126,7 +135,9 @@ async def test_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command.call_args_list[0][0][1] == "off" -async def test_cover_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: +async def test_cover_send_no_wait( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test command sending to a cover device without ack.""" domain = "cover" config = { @@ -150,7 +161,9 @@ async def test_cover_send_no_wait(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command.call_args_list[0][0][1] == "STOP" -async def test_send_command(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) 
-> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -168,7 +181,9 @@ async def test_send_command(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[0][0][1] == "on" -async def test_send_command_invalid_arguments(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command_invalid_arguments( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test send_command service.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -201,7 +216,9 @@ async def test_send_command_invalid_arguments(hass: HomeAssistant, monkeypatch) assert not success, "send command should not succeed for unknown command" -async def test_send_command_event_propagation(hass: HomeAssistant, monkeypatch) -> None: +async def test_send_command_event_propagation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test event propagation for send_command service.""" domain = "light" config = { @@ -243,7 +260,9 @@ async def test_send_command_event_propagation(hass: HomeAssistant, monkeypatch) assert hass.states.get(f"{domain}.test1").state == "off" -async def test_reconnecting_after_disconnect(hass: HomeAssistant, monkeypatch) -> None: +async def test_reconnecting_after_disconnect( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """An unexpected disconnect should cause a reconnect.""" domain = "sensor" config = { @@ -267,7 +286,9 @@ async def test_reconnecting_after_disconnect(hass: HomeAssistant, monkeypatch) - assert mock_create.call_count == 2 -async def test_reconnecting_after_failure(hass: HomeAssistant, monkeypatch) -> None: +async def test_reconnecting_after_failure( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """A failure to reconnect should be retried.""" domain = "sensor" config = { @@ -294,7 +315,9 @@ async def test_reconnecting_after_failure(hass: HomeAssistant, monkeypatch) -> N assert mock_create.call_count == 3 -async def test_error_when_not_connected(hass: HomeAssistant, monkeypatch) -> None: +async def test_error_when_not_connected( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Sending command should error when not connected.""" domain = "switch" config = { @@ -324,7 +347,9 @@ async def test_error_when_not_connected(hass: HomeAssistant, monkeypatch) -> Non assert not success, "changing state should not succeed when disconnected" -async def test_async_send_command_error(hass: HomeAssistant, monkeypatch) -> None: +async def test_async_send_command_error( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Sending command should error when protocol fails.""" domain = "rflink" config = {"rflink": {"port": "/dev/ttyABC0"}} @@ -345,7 +370,9 @@ async def test_async_send_command_error(hass: HomeAssistant, monkeypatch) -> Non assert protocol.send_command_ack.call_args_list[0][0][1] == SERVICE_TURN_OFF -async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: +async def test_race_condition( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test race condition for unknown components.""" domain = "light" config = {"rflink": {"port": "/dev/ttyABC0"}, domain: {"platform": "rflink"}} @@ -381,7 +408,7 @@ async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: assert new_sensor.state == "on" -async def test_not_connected(hass: HomeAssistant, monkeypatch) -> None: +async def test_not_connected() -> None: """Test Error when sending commands 
to a disconnected device.""" test_device = RflinkCommand("DUMMY_DEVICE") RflinkCommand.set_rflink_protocol(None) @@ -390,7 +417,9 @@ async def test_not_connected(hass: HomeAssistant, monkeypatch) -> None: async def test_keepalive( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate negative keepalive values.""" keepalive_value = -3 @@ -418,7 +447,9 @@ async def test_keepalive( async def test_keepalive_2( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate very short keepalive values.""" keepalive_value = 30 @@ -446,7 +477,9 @@ async def test_keepalive_2( async def test_keepalive_3( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate keepalive=0 value.""" domain = RFLINK_DOMAIN @@ -466,7 +499,9 @@ async def test_keepalive_3( async def test_default_keepalive( - hass: HomeAssistant, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Validate keepalive=0 value.""" domain = RFLINK_DOMAIN @@ -485,7 +520,9 @@ async def test_default_keepalive( async def test_unique_id( - hass: HomeAssistant, entity_registry: er.EntityRegistry, monkeypatch + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Validate the device unique_id.""" diff --git a/tests/components/rflink/test_light.py b/tests/components/rflink/test_light.py index 5ee2375bc36..ceb2b19e192 100644 --- a/tests/components/rflink/test_light.py +++ b/tests/components/rflink/test_light.py @@ -5,6 +5,8 @@ control of RFLink switch devices. 
""" +import pytest + from homeassistant.components.light import ATTR_BRIGHTNESS from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( @@ -38,7 +40,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the RFLink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -146,7 +150,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[5][0][1] == "7" -async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: +async def test_firing_bus_event( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Incoming RFLink command events should be put on the HA event bus.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -181,7 +187,9 @@ async def test_firing_bus_event(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Command should be sent amount of configured repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -237,7 +245,9 @@ async def test_signal_repetitions(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_count == 8 -async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_alternation( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Simultaneously switching entities must alternate repetitions.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -268,7 +278,9 @@ async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) assert protocol.send_command_ack.call_args_list[3][0][0] == "protocol_0_1" -async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) -> None: +async def test_signal_repetitions_cancelling( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Cancel outstanding repetitions when state changed.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -302,7 +314,9 @@ async def test_signal_repetitions_cancelling(hass: HomeAssistant, monkeypatch) - ] -async def test_type_toggle(hass: HomeAssistant, monkeypatch) -> None: +async def test_type_toggle( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test toggle type lights (on/on).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -347,7 +361,9 @@ async def test_type_toggle(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.toggle_test").state == "off" -async def test_set_level_command(hass: HomeAssistant, monkeypatch) -> None: +async def test_set_level_command( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test 'set_level=XX' events.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -434,7 +450,9 @@ async def test_set_level_command(hass: HomeAssistant, monkeypatch) -> None: assert state.attributes[ATTR_BRIGHTNESS] == 0 -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = 
{ "rflink": {"port": "/dev/ttyABC0"}, @@ -471,7 +489,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test2").state == "on" -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -504,7 +524,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -532,7 +554,9 @@ async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: +async def test_disable_automatic_add( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -550,7 +574,9 @@ async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: assert not hass.states.get(f"{DOMAIN}.protocol_0_0") -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_sensor.py b/tests/components/rflink/test_sensor.py index e375f3ae863..278dd45a114 100644 --- a/tests/components/rflink/test_sensor.py +++ b/tests/components/rflink/test_sensor.py @@ -5,6 +5,8 @@ automatic sensor creation. 
""" +import pytest + from homeassistant.components.rflink import ( CONF_RECONNECT_INTERVAL, DATA_ENTITY_LOOKUP, @@ -39,7 +41,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink sensor component.""" # setup mocking rflink module event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch) @@ -100,7 +104,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert bat_sensor.attributes[ATTR_ICON] == "mdi:battery" -async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: +async def test_disable_automatic_add( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If disabled new devices should not be automatically added.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -125,7 +131,9 @@ async def test_disable_automatic_add(hass: HomeAssistant, monkeypatch) -> None: assert not hass.states.get("sensor.test2") -async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: +async def test_entity_availability( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """If Rflink device is disconnected, entities should become unavailable.""" # Make sure Rflink mock does not 'recover' to quickly from the # disconnect or else the unavailability cannot be measured @@ -160,7 +168,7 @@ async def test_entity_availability(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get("sensor.test").state == STATE_UNKNOWN -async def test_aliases(hass: HomeAssistant, monkeypatch) -> None: +async def test_aliases(hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch) -> None: """Validate the response to sensor's alias (with aliases).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -202,7 +210,9 @@ async def test_aliases(hass: HomeAssistant, monkeypatch) -> None: assert updated_sensor.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE -async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: +async def test_race_condition( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test race condition for unknown components.""" config = {"rflink": {"port": "/dev/ttyABC0"}, DOMAIN: {"platform": "rflink"}} tmp_entity = TMP_ENTITY.format("test3") @@ -241,7 +251,9 @@ async def test_race_condition(hass: HomeAssistant, monkeypatch) -> None: assert new_sensor.state == "ko" -async def test_sensor_attributes(hass: HomeAssistant, monkeypatch) -> None: +async def test_sensor_attributes( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Validate the sensor attributes.""" config = { diff --git a/tests/components/rflink/test_switch.py b/tests/components/rflink/test_switch.py index 705856565ae..2aab145f847 100644 --- a/tests/components/rflink/test_switch.py +++ b/tests/components/rflink/test_switch.py @@ -5,6 +5,8 @@ control of Rflink switch devices. 
""" +import pytest + from homeassistant.components.rflink import EVENT_BUTTON_PRESSED from homeassistant.const import ( ATTR_ENTITY_ID, @@ -33,7 +35,9 @@ CONFIG = { } -async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: +async def test_default_setup( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Test all basic functionality of the rflink switch component.""" # setup mocking rflink module event_callback, create, protocol, _ = await mock_rflink( @@ -93,7 +97,9 @@ async def test_default_setup(hass: HomeAssistant, monkeypatch) -> None: assert protocol.send_command_ack.call_args_list[1][0][1] == "on" -async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_group_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Group aliases should only respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -123,7 +129,9 @@ async def test_group_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_alias( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Non group aliases should not respond to group commands.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -156,7 +164,9 @@ async def test_nogroup_alias(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: +async def test_nogroup_device_id( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Device id that do not respond to group commands (allon/alloff).""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -184,7 +194,9 @@ async def test_nogroup_device_id(hass: HomeAssistant, monkeypatch) -> None: assert hass.states.get(f"{DOMAIN}.test").state == "on" -async def test_device_defaults(hass: HomeAssistant, monkeypatch) -> None: +async def test_device_defaults( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Event should fire if device_defaults config says so.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -216,7 +228,9 @@ async def test_device_defaults(hass: HomeAssistant, monkeypatch) -> None: assert calls[0].data == {"state": "off", "entity_id": f"{DOMAIN}.test"} -async def test_not_firing_default(hass: HomeAssistant, monkeypatch) -> None: +async def test_not_firing_default( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """By default no bus events should be fired.""" config = { "rflink": {"port": "/dev/ttyABC0"}, @@ -246,7 +260,9 @@ async def test_not_firing_default(hass: HomeAssistant, monkeypatch) -> None: assert not calls, "an event has been fired" -async def test_restore_state(hass: HomeAssistant, monkeypatch) -> None: +async def test_restore_state( + hass: HomeAssistant, monkeypatch: pytest.MonkeyPatch +) -> None: """Ensure states are restored on startup.""" config = { "rflink": {"port": "/dev/ttyABC0"}, diff --git a/tests/components/rflink/test_utils.py b/tests/components/rflink/test_utils.py index 170a05f8623..38804d14ecc 100644 --- a/tests/components/rflink/test_utils.py +++ b/tests/components/rflink/test_utils.py @@ -4,10 +4,9 @@ from homeassistant.components.rflink.utils import ( brightness_to_rflink, rflink_to_brightness, ) -from homeassistant.core import HomeAssistant -async def test_utils(hass: HomeAssistant, monkeypatch) -> None: +async def test_utils() -> 
None: """Test all utils methods.""" # test brightness_to_rflink assert brightness_to_rflink(0) == 0 diff --git a/tests/components/ridwell/test_diagnostics.py b/tests/components/ridwell/test_diagnostics.py index adfbb525283..45683bba903 100644 --- a/tests/components/ridwell/test_diagnostics.py +++ b/tests/components/ridwell/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Ridwell diagnostics.""" from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -16,7 +17,6 @@ async def test_entry_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 58e77184f55..cd4447c1a9a 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -1,11 +1,11 @@ """Configuration for Ring tests.""" +from collections.abc import Generator from itertools import chain from unittest.mock import AsyncMock, Mock, create_autospec, patch import pytest import ring_doorbell -from typing_extensions import Generator from homeassistant.components.ring import DOMAIN from homeassistant.const import CONF_USERNAME diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index f43370c918d..88ad37bdd36 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -142,6 +142,9 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): DOORBOT_HISTORY if device_family != "other" else INTERCOM_HISTORY ) + if has_capability(RingCapability.VIDEO): + mock_device.recording_url = MagicMock(return_value="http://dummy.url") + if has_capability(RingCapability.MOTION_DETECTION): mock_device.configure_mock( motion_detection=device_dict["settings"].get("motion_detection_enabled"), diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 20a9ed5f0c9..49b7dc10f05 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,18 +1,33 @@ """The tests for the Ring switch platform.""" -from unittest.mock import PropertyMock +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from aiohttp.test_utils import make_mocked_request +from freezegun.api import FrozenDateTimeFactory import pytest import ring_doorbell +from homeassistant.components import camera +from homeassistant.components.ring.camera import FORCE_REFRESH_INTERVAL +from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er +from homeassistant.util.aiohttp import MockStreamReader from .common import setup_platform +from tests.common import async_fire_time_changed + +SMALLEST_VALID_JPEG = ( + "ffd8ffe000104a46494600010101004800480000ffdb00430003020202020203020202030303030406040404040408060" + "6050609080a0a090809090a0c0f0c0a0b0e0b09090d110d0e0f101011100a0c12131210130f101010ffc9000b08000100" + "0101011100ffcc000600101005ffda0008010100003f00d2cf20ffd9" +) +SMALLEST_VALID_JPEG_BYTES = 
bytes.fromhex(SMALLEST_VALID_JPEG) + async def test_entity_registry( hass: HomeAssistant, @@ -52,7 +67,7 @@ async def test_camera_motion_detection_state_reports_correctly( assert state.attributes.get("friendly_name") == friendly_name -async def test_camera_motion_detection_can_be_turned_on( +async def test_camera_motion_detection_can_be_turned_on_and_off( hass: HomeAssistant, mock_ring_client ) -> None: """Tests the siren turns on correctly.""" @@ -73,6 +88,55 @@ async def test_camera_motion_detection_can_be_turned_on( state = hass.states.get("camera.front") assert state.attributes.get("motion_detection") is True + await hass.services.async_call( + "camera", + "disable_motion_detection", + {"entity_id": "camera.front"}, + blocking=True, + ) + + await hass.async_block_till_done() + + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + + +async def test_camera_motion_detection_not_supported( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + caplog: pytest.LogCaptureFixture, +) -> None: + """Tests enabling motion detection on a camera that lacks the capability.""" + front_camera_mock = mock_ring_devices.get_device(765432) + has_capability = front_camera_mock.has_capability.side_effect + + def _has_capability(capability): + if capability == "motion_detection": + return False + return has_capability(capability) + + front_camera_mock.has_capability.side_effect = _has_capability + + await setup_platform(hass, Platform.CAMERA) + + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + + await hass.services.async_call( + "camera", + "enable_motion_detection", + {"entity_id": "camera.front"}, + blocking=True, + ) + + await hass.async_block_till_done() + state = hass.states.get("camera.front") + assert state.attributes.get("motion_detection") is None + assert ( + "Entity camera.front does not have motion detection capability" in caplog.text + ) + async def test_updates_work( hass: HomeAssistant, mock_ring_client, mock_ring_devices @@ -136,3 +200,117 @@ async def test_motion_detection_errors_when_turned_on( ) == reauth_expected ) + + +async def test_camera_handle_mjpeg_stream( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + freezer: FrozenDateTimeFactory, +) -> None: + """Test camera returns an mjpeg stream when a recording is available.""" + await setup_platform(hass, Platform.CAMERA) + + front_camera_mock = mock_ring_devices.get_device(765432) + front_camera_mock.recording_url.return_value = None + + state = hass.states.get("camera.front") + assert state is not None + + mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) + + # history not updated yet + front_camera_mock.history.assert_not_called() + front_camera_mock.recording_url.assert_not_called() + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # Video url will be None, so no stream + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.history.assert_called_once() + front_camera_mock.recording_url.assert_called_once() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # Stop the history updating so we can update the values manually + front_camera_mock.history = MagicMock() + front_camera_mock.last_history[0]["recording"]["status"] = "not ready" + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await 
hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.recording_url.assert_called_once() + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + # If the history id hasn't changed the camera will not check again for the video url + # until the FORCE_REFRESH_INTERVAL has passed + front_camera_mock.last_history[0]["recording"]["status"] = "ready" + front_camera_mock.recording_url = MagicMock(return_value="http://dummy.url") + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.recording_url.assert_not_called() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is None + + freezer.tick(FORCE_REFRESH_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + front_camera_mock.recording_url.assert_called_once() + + # Now the stream should be returned + stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) + with patch("homeassistant.components.ring.camera.CameraMjpeg") as mock_camera: + mock_camera.return_value.get_reader = AsyncMock(return_value=stream_reader) + mock_camera.return_value.open_camera = AsyncMock() + mock_camera.return_value.close = AsyncMock() + + stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") + assert stream is not None + # Check the stream has been read + assert not await stream_reader.read(-1) + + +async def test_camera_image( + hass: HomeAssistant, + mock_ring_client, + mock_ring_devices, + freezer: FrozenDateTimeFactory, +) -> None: + """Test camera will return still image when available.""" + await setup_platform(hass, Platform.CAMERA) + + front_camera_mock = mock_ring_devices.get_device(765432) + + state = hass.states.get("camera.front") + assert state is not None + + # history not updated yet + front_camera_mock.history.assert_not_called() + front_camera_mock.recording_url.assert_not_called() + with ( + patch( + "homeassistant.components.ring.camera.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ), + pytest.raises(HomeAssistantError), + ): + image = await camera.async_get_image(hass, "camera.front") + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + # history updated so image available + front_camera_mock.history.assert_called_once() + front_camera_mock.recording_url.assert_called_once() + + with patch( + "homeassistant.components.ring.camera.ffmpeg.async_get_image", + return_value=SMALLEST_VALID_JPEG_BYTES, + ): + image = await camera.async_get_image(hass, "camera.front") + assert image.content == SMALLEST_VALID_JPEG_BYTES diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 72444bdc9f2..4c8f7bb4180 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -174,11 +174,10 @@ def save_mock(): @pytest.mark.parametrize("events", [TEST_EVENTS]) +@pytest.mark.usefixtures("two_zone_cloud", "_set_utc_time_zone") async def test_cloud_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, - two_zone_cloud, - _set_utc_time_zone, save_mock, setup_risco_cloud, ) -> None: @@ -207,11 +206,9 @@ async def test_cloud_setup( _check_state(hass, category, entity_id) +@pytest.mark.usefixtures("setup_risco_local", "_no_zones_and_partitions") async def test_local_setup( - hass: HomeAssistant, 
entity_registry: er.EntityRegistry, - setup_risco_local, - _no_zones_and_partitions, ) -> None: """Test entity setup.""" for entity_id in ENTITY_IDS.values(): diff --git a/tests/components/roborock/conftest.py b/tests/components/roborock/conftest.py index a7ebbf10af3..357c644e2fe 100644 --- a/tests/components/roborock/conftest.py +++ b/tests/components/roborock/conftest.py @@ -4,8 +4,8 @@ from copy import deepcopy from unittest.mock import patch import pytest -from roborock import RoomMapping -from roborock.code_mappings import DyadError, RoborockDyadStateCode +from roborock import RoborockCategory, RoomMapping +from roborock.code_mappings import DyadError, RoborockDyadStateCode, ZeoError, ZeoState from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol from roborock.version_a01_apis import RoborockMqttClientA01 @@ -38,14 +38,22 @@ class A01Mock(RoborockMqttClientA01): def __init__(self, user_data, device_info, category) -> None: """Initialize the A01Mock.""" super().__init__(user_data, device_info, category) - self.protocol_responses = { - RoborockDyadDataProtocol.STATUS: RoborockDyadStateCode.drying.name, - RoborockDyadDataProtocol.POWER: 100, - RoborockDyadDataProtocol.MESH_LEFT: 111, - RoborockDyadDataProtocol.BRUSH_LEFT: 222, - RoborockDyadDataProtocol.ERROR: DyadError.none.name, - RoborockDyadDataProtocol.TOTAL_RUN_TIME: 213, - } + if category == RoborockCategory.WET_DRY_VAC: + self.protocol_responses = { + RoborockDyadDataProtocol.STATUS: RoborockDyadStateCode.drying.name, + RoborockDyadDataProtocol.POWER: 100, + RoborockDyadDataProtocol.MESH_LEFT: 111, + RoborockDyadDataProtocol.BRUSH_LEFT: 222, + RoborockDyadDataProtocol.ERROR: DyadError.none.name, + RoborockDyadDataProtocol.TOTAL_RUN_TIME: 213, + } + elif category == RoborockCategory.WASHING_MACHINE: + self.protocol_responses = { + RoborockZeoProtocol.STATE: ZeoState.drying.name, + RoborockZeoProtocol.COUNTDOWN: 0, + RoborockZeoProtocol.WASHING_LEFT: 253, + RoborockZeoProtocol.ERROR: ZeoError.none.name, + } async def update_values( self, dyad_data_protocols: list[RoborockDyadDataProtocol | RoborockZeoProtocol] diff --git a/tests/components/roborock/snapshots/test_diagnostics.ambr b/tests/components/roborock/snapshots/test_diagnostics.ambr index 4318b537a2c..805a498041a 100644 --- a/tests/components/roborock/snapshots/test_diagnostics.ambr +++ b/tests/components/roborock/snapshots/test_diagnostics.ambr @@ -951,6 +951,355 @@ }), }), }), + '**REDACTED-3**': dict({ + 'api': dict({ + 'misc_info': dict({ + }), + }), + 'roborock_device_info': dict({ + 'device': dict({ + 'activeTime': 1699964128, + 'deviceStatus': dict({ + '10001': '{"f":"t"}', + '10005': '{"sn":"zeo_sn","ssid":"internet","timezone":"Europe/Berlin","posix_timezone":"CET-1CEST,M3.5.0,M10.5.0/3","ip":"192.111.11.11","mac":"b0:4a:00:00:00:00","rssi":-57,"oba":{"language":"en","name":"A.03.0403_CE","bom":"A.03.0403","location":"de","wifiplan":"EU","timezone":"CET-1CEST,M3.5.0,M10.5.0/3;Europe/Berlin","logserver":"awsde0","loglevel":"4","featureset":"0"}}', + '10007': '{"mqttOtaData":{"mqttOtaStatus":{"status":"IDLE"}}}', + '200': 1, + '201': 0, + '202': 1, + '203': 7, + '204': 1, + '205': 33, + '206': 0, + '207': 4, + '208': 2, + '209': 7, + '210': 1, + '211': 1, + '212': 1, + '213': 2, + '214': 2, + '217': 0, + '218': 227, + '219': 0, + '220': 0, + '221': 0, + '222': 347414, + '223': 0, + '224': 21, + '225': 0, + '226': 0, + '227': 1, + '232': 0, + }), + 'duid': '**REDACTED**', + 'f': False, + 'featureSet': '0', + 'fv': 
'01.00.94', + 'iconUrl': '', + 'localKey': '**REDACTED**', + 'name': 'Zeo One', + 'newFeatureSet': '40', + 'online': True, + 'productId': 'zeo_id', + 'pv': 'A01', + 'share': True, + 'shareTime': 1712763572, + 'silentOtaSwitch': False, + 'sn': 'zeo_sn', + 'timeZoneId': 'Europe/Berlin', + 'tuyaMigrated': False, + }), + 'product': dict({ + 'capability': 2, + 'category': 'roborock.wm', + 'id': 'zeo_id', + 'model': 'roborock.wm.a102', + 'name': 'Zeo One', + 'schema': list([ + dict({ + 'code': 'drying_status', + 'id': '134', + 'mode': 'ro', + 'name': '烘干状态', + 'type': 'RAW', + }), + dict({ + 'code': 'start', + 'id': '200', + 'mode': 'rw', + 'name': '启动', + 'type': 'BOOL', + }), + dict({ + 'code': 'pause', + 'id': '201', + 'mode': 'rw', + 'name': '暂停', + 'type': 'BOOL', + }), + dict({ + 'code': 'shutdown', + 'id': '202', + 'mode': 'rw', + 'name': '关机', + 'type': 'BOOL', + }), + dict({ + 'code': 'status', + 'id': '203', + 'mode': 'ro', + 'name': '状态', + 'type': 'VALUE', + }), + dict({ + 'code': 'mode', + 'id': '204', + 'mode': 'rw', + 'name': '模式', + 'type': 'VALUE', + }), + dict({ + 'code': 'program', + 'id': '205', + 'mode': 'rw', + 'name': '程序', + 'type': 'VALUE', + }), + dict({ + 'code': 'child_lock', + 'id': '206', + 'mode': 'rw', + 'name': '童锁', + 'type': 'BOOL', + }), + dict({ + 'code': 'temp', + 'id': '207', + 'mode': 'rw', + 'name': '洗涤温度', + 'type': 'VALUE', + }), + dict({ + 'code': 'rinse_times', + 'id': '208', + 'mode': 'rw', + 'name': '漂洗次数', + 'type': 'VALUE', + }), + dict({ + 'code': 'spin_level', + 'id': '209', + 'mode': 'rw', + 'name': '滚筒转速', + 'type': 'VALUE', + }), + dict({ + 'code': 'drying_mode', + 'id': '210', + 'mode': 'rw', + 'name': '干燥度', + 'type': 'VALUE', + }), + dict({ + 'code': 'detergent_set', + 'id': '211', + 'mode': 'rw', + 'name': '自动投放-洗衣液', + 'type': 'BOOL', + }), + dict({ + 'code': 'softener_set', + 'id': '212', + 'mode': 'rw', + 'name': '自动投放-柔顺剂', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_type', + 'id': '213', + 'mode': 'rw', + 'name': '洗衣液投放量', + 'type': 'VALUE', + }), + dict({ + 'code': 'softener_type', + 'id': '214', + 'mode': 'rw', + 'name': '柔顺剂投放量', + 'type': 'VALUE', + }), + dict({ + 'code': 'countdown', + 'id': '217', + 'mode': 'rw', + 'name': '预约时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'washing_left', + 'id': '218', + 'mode': 'ro', + 'name': '洗衣剩余时间', + 'type': 'VALUE', + }), + dict({ + 'code': 'doorlock_state', + 'id': '219', + 'mode': 'ro', + 'name': '门锁状态', + 'type': 'BOOL', + }), + dict({ + 'code': 'error', + 'id': '220', + 'mode': 'ro', + 'name': '故障', + 'type': 'VALUE', + }), + dict({ + 'code': 'custom_param_save', + 'id': '221', + 'mode': 'rw', + 'name': '云程序设置', + 'type': 'VALUE', + }), + dict({ + 'code': 'custom_param_get', + 'id': '222', + 'mode': 'ro', + 'name': '云程序读取', + 'type': 'VALUE', + }), + dict({ + 'code': 'sound_set', + 'id': '223', + 'mode': 'rw', + 'name': '提示音', + 'type': 'BOOL', + }), + dict({ + 'code': 'times_after_clean', + 'id': '224', + 'mode': 'ro', + 'name': '距离上次筒自洁次数', + 'type': 'VALUE', + }), + dict({ + 'code': 'default_setting', + 'id': '225', + 'mode': 'rw', + 'name': '记忆洗衣偏好开关', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_empty', + 'id': '226', + 'mode': 'ro', + 'name': '洗衣液用尽', + 'type': 'BOOL', + }), + dict({ + 'code': 'softener_empty', + 'id': '227', + 'mode': 'ro', + 'name': '柔顺剂用尽', + 'type': 'BOOL', + }), + dict({ + 'code': 'light_setting', + 'id': '229', + 'mode': 'rw', + 'name': '筒灯设定', + 'type': 'BOOL', + }), + dict({ + 'code': 'detergent_volume', + 'id': '230', + 'mode': 
'rw', + 'name': '洗衣液投放量(单次)', + 'type': 'VALUE', + }), + dict({ + 'code': 'softener_volume', + 'id': '231', + 'mode': 'rw', + 'name': '柔顺剂投放量(单次)', + 'type': 'VALUE', + }), + dict({ + 'code': 'app_authorization', + 'id': '232', + 'mode': 'rw', + 'name': '远程控制授权', + 'type': 'VALUE', + }), + dict({ + 'code': 'id_query', + 'id': '10000', + 'mode': 'rw', + 'name': 'ID点查询', + 'type': 'STRING', + }), + dict({ + 'code': 'f_c', + 'id': '10001', + 'mode': 'ro', + 'name': '防串货', + 'type': 'STRING', + }), + dict({ + 'code': 'snd_state', + 'id': '10004', + 'mode': 'rw', + 'name': '语音包/OBA信息', + 'type': 'STRING', + }), + dict({ + 'code': 'product_info', + 'id': '10005', + 'mode': 'ro', + 'name': '产品信息', + 'type': 'STRING', + }), + dict({ + 'code': 'privacy_info', + 'id': '10006', + 'mode': 'rw', + 'name': '隐私协议', + 'type': 'STRING', + }), + dict({ + 'code': 'ota_nfo', + 'id': '10007', + 'mode': 'rw', + 'name': 'OTA info', + 'type': 'STRING', + }), + dict({ + 'code': 'washing_log', + 'id': '10008', + 'mode': 'ro', + 'name': '洗衣记录', + 'type': 'BOOL', + }), + dict({ + 'code': 'rpc_req', + 'id': '10101', + 'mode': 'wo', + 'name': 'rpc req', + 'type': 'STRING', + }), + dict({ + 'code': 'rpc_resp', + 'id': '10102', + 'mode': 'ro', + 'name': 'rpc resp', + 'type': 'STRING', + }), + ]), + }), + }), + }), }), }) # --- diff --git a/tests/components/roborock/test_init.py b/tests/components/roborock/test_init.py index 0437ce781f1..cace9a8ed67 100644 --- a/tests/components/roborock/test_init.py +++ b/tests/components/roborock/test_init.py @@ -29,7 +29,6 @@ async def test_unload_entry( await hass.async_block_till_done() assert mock_disconnect.call_count == 2 assert setup_entry.state is ConfigEntryState.NOT_LOADED - assert not hass.data.get(DOMAIN) async def test_config_entry_not_ready( @@ -177,3 +176,21 @@ async def test_not_supported_protocol( await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() assert "because its protocol version random" in caplog.text + + +async def test_not_supported_a01_device( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test that we output a message on incorrect category.""" + home_data_copy = deepcopy(HOME_DATA) + home_data_copy.products[2].category = "random" + with patch( + "homeassistant.components.roborock.RoborockApiClient.get_home_data_v2", + return_value=home_data_copy, + ): + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done() + assert "The device you added is not yet supported" in caplog.text diff --git a/tests/components/roborock/test_select.py b/tests/components/roborock/test_select.py index c8626818749..ce846107d93 100644 --- a/tests/components/roborock/test_select.py +++ b/tests/components/roborock/test_select.py @@ -1,13 +1,18 @@ """Test Roborock Select platform.""" +import copy from unittest.mock import patch import pytest from roborock.exceptions import RoborockException -from homeassistant.const import SERVICE_SELECT_OPTION +from homeassistant.components.roborock import DOMAIN +from homeassistant.const import SERVICE_SELECT_OPTION, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.setup import async_setup_component + +from .mock_data import PROP from tests.common import MockConfigEntry @@ -17,6 +22,7 @@ from tests.common import MockConfigEntry [ ("select.roborock_s7_maxv_mop_mode", "deep"), ("select.roborock_s7_maxv_mop_intensity", "mild"), + 
("select.roborock_s7_maxv_selected_map", "Downstairs"), ], ) async def test_update_success( @@ -62,3 +68,21 @@ async def test_update_failure( blocking=True, target={"entity_id": "select.roborock_s7_maxv_mop_mode"}, ) + + +async def test_none_map_select( + hass: HomeAssistant, + bypass_api_fixture, + mock_roborock_entry: MockConfigEntry, +) -> None: + """Test that the select entity correctly handles not having a current map.""" + prop = copy.deepcopy(PROP) + # Set map status to None so that current map is never set + prop.status.map_status = None + with patch( + "homeassistant.components.roborock.coordinator.RoborockLocalClientV1.get_prop", + return_value=prop, + ): + await async_setup_component(hass, DOMAIN, {}) + select_entity = hass.states.get("select.roborock_s7_maxv_selected_map") + assert select_entity.state == STATE_UNKNOWN diff --git a/tests/components/roborock/test_sensor.py b/tests/components/roborock/test_sensor.py index e608895ca43..908754f3b92 100644 --- a/tests/components/roborock/test_sensor.py +++ b/tests/components/roborock/test_sensor.py @@ -21,7 +21,7 @@ from tests.common import MockConfigEntry async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> None: """Test sensors and check test values are correctly set.""" - assert len(hass.states.async_all("sensor")) == 34 + assert len(hass.states.async_all("sensor")) == 38 assert hass.states.get("sensor.roborock_s7_maxv_main_brush_time_left").state == str( MAIN_BRUSH_REPLACE_TIME - 74382 ) @@ -60,6 +60,10 @@ async def test_sensors(hass: HomeAssistant, setup_entry: MockConfigEntry) -> Non assert hass.states.get("sensor.dyad_pro_roller_left").state == "222" assert hass.states.get("sensor.dyad_pro_error").state == "none" assert hass.states.get("sensor.dyad_pro_total_cleaning_time").state == "213" + assert hass.states.get("sensor.zeo_one_state").state == "drying" + assert hass.states.get("sensor.zeo_one_countdown").state == "0" + assert hass.states.get("sensor.zeo_one_washing_left").state == "253" + assert hass.states.get("sensor.zeo_one_error").state == "none" async def test_listener_update( diff --git a/tests/components/roku/conftest.py b/tests/components/roku/conftest.py index 160a1bf3127..7ac332a1a6c 100644 --- a/tests/components/roku/conftest.py +++ b/tests/components/roku/conftest.py @@ -1,11 +1,11 @@ """Fixtures for Roku integration tests.""" +from collections.abc import Generator import json from unittest.mock import MagicMock, patch import pytest from rokuecp import Device as RokuDevice -from typing_extensions import Generator from homeassistant.components.roku.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/rova/snapshots/test_init.ambr b/tests/components/rova/snapshots/test_init.ambr index ffb08ee082e..5e607e6a8df 100644 --- a/tests/components/rova/snapshots/test_init.ambr +++ b/tests/components/rova/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': '8381BE 13', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/rtsp_to_webrtc/conftest.py b/tests/components/rtsp_to_webrtc/conftest.py index 6e790b4ff00..956825f6372 100644 --- a/tests/components/rtsp_to_webrtc/conftest.py +++ b/tests/components/rtsp_to_webrtc/conftest.py @@ -2,13 +2,12 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import AsyncGenerator, Awaitable, Callable from typing import Any from unittest.mock import patch import pytest import 
rtsp_to_webrtc -from typing_extensions import AsyncGenerator from homeassistant.components import camera from homeassistant.components.rtsp_to_webrtc import DOMAIN diff --git a/tests/components/rtsp_to_webrtc/test_config_flow.py b/tests/components/rtsp_to_webrtc/test_config_flow.py index 504ede68ac7..5daf9400396 100644 --- a/tests/components/rtsp_to_webrtc/test_config_flow.py +++ b/tests/components/rtsp_to_webrtc/test_config_flow.py @@ -25,7 +25,7 @@ async def test_web_full_flow(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") == str + assert result.get("data_schema").schema.get("server_url") is str assert not result.get("errors") with ( patch("rtsp_to_webrtc.client.Client.heartbeat"), @@ -64,7 +64,7 @@ async def test_invalid_url(hass: HomeAssistant) -> None: ) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "user" - assert result.get("data_schema").schema.get("server_url") == str + assert result.get("data_schema").schema.get("server_url") is str assert not result.get("errors") result = await hass.config_entries.flow.async_configure( result["flow_id"], {"server_url": "not-a-url"} diff --git a/tests/components/russound_rio/__init__.py b/tests/components/russound_rio/__init__.py new file mode 100644 index 00000000000..96171071907 --- /dev/null +++ b/tests/components/russound_rio/__init__.py @@ -0,0 +1 @@ +"""Tests for the Russound RIO integration.""" diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py new file mode 100644 index 00000000000..a87d0a74fa8 --- /dev/null +++ b/tests/components/russound_rio/conftest.py @@ -0,0 +1,48 @@ +"""Test fixtures for Russound RIO integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .const import HARDWARE_MAC, MOCK_CONFIG, MOCK_CONTROLLERS, MODEL + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry(): + """Prevent setup.""" + with patch( + "homeassistant.components.russound_rio.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Mock a Russound RIO config entry.""" + entry = MockConfigEntry( + domain=DOMAIN, data=MOCK_CONFIG, unique_id=HARDWARE_MAC, title=MODEL + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_russound() -> Generator[AsyncMock]: + """Mock the Russound RIO client.""" + with ( + patch( + "homeassistant.components.russound_rio.Russound", autospec=True + ) as mock_client, + patch( + "homeassistant.components.russound_rio.config_flow.Russound", + return_value=mock_client, + ), + ): + mock_client.enumerate_controllers.return_value = MOCK_CONTROLLERS + yield mock_client diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py new file mode 100644 index 00000000000..d1f6aa7eead --- /dev/null +++ b/tests/components/russound_rio/const.py @@ -0,0 +1,16 @@ +"""Constants for russound_rio tests.""" + +from collections import namedtuple + +HOST = "127.0.0.1" +PORT = 9621 +MODEL = "MCA-C5" +HARDWARE_MAC = "00:11:22:33:44:55" + +MOCK_CONFIG = { + "host": HOST, + "port": PORT, +} + +_CONTROLLER = namedtuple("Controller", ["mac_address", 
"controller_type"]) +MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} diff --git a/tests/components/russound_rio/test_config_flow.py b/tests/components/russound_rio/test_config_flow.py new file mode 100644 index 00000000000..8bc7bd738a1 --- /dev/null +++ b/tests/components/russound_rio/test_config_flow.py @@ -0,0 +1,135 @@ +"""Test the Russound RIO config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.russound_rio.const import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONFIG, MOCK_CONTROLLERS, MODEL + + +async def test_form( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_form_cannot_connect( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle cannot connect error.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + mock_russound.connect.side_effect = TimeoutError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + # Recover with correct information + mock_russound.connect.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_no_primary_controller( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we handle no primary controller error.""" + mock_russound.enumerate_controllers.return_value = {} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + user_input = MOCK_CONFIG + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_primary_controller"} + + # Recover with correct information + mock_russound.enumerate_controllers.return_value = MOCK_CONTROLLERS + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + MOCK_CONFIG, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_russound: AsyncMock +) -> None: + """Test we import a config entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=MOCK_CONFIG, 
+ ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == MODEL + assert result["data"] == MOCK_CONFIG + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_import_cannot_connect( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test we handle import cannot connect error.""" + mock_russound.connect.side_effect = TimeoutError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +async def test_import_no_primary_controller( + hass: HomeAssistant, mock_russound: AsyncMock +) -> None: + """Test import with no primary controller error.""" + mock_russound.enumerate_controllers.return_value = {} + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_CONFIG + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_primary_controller" diff --git a/tests/components/sabnzbd/conftest.py b/tests/components/sabnzbd/conftest.py index 7d68d3108f0..b5450e5134f 100644 --- a/tests/components/sabnzbd/conftest.py +++ b/tests/components/sabnzbd/conftest.py @@ -1,9 +1,9 @@ """Configuration for Sabnzbd tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index 8d38adad06d..752bce3b960 100644 --- a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Awaitable, Callable, Generator from datetime import datetime from socket import AddressFamily # pylint: disable=no-name-in-module from typing import Any @@ -19,16 +19,12 @@ from samsungtvws.encrypted.remote import SamsungTVEncryptedWSAsyncRemote from samsungtvws.event import ED_INSTALLED_APP_EVENT from samsungtvws.exceptions import ResponseError from samsungtvws.remote import ChannelEmitCommand -from typing_extensions import Generator from homeassistant.components.samsungtv.const import WEBSOCKET_SSL_PORT -from homeassistant.core import HomeAssistant, ServiceCall import homeassistant.util.dt as dt_util from .const import SAMPLE_DEVICE_INFO_UE48JU6400, SAMPLE_DEVICE_INFO_WIFI -from tests.common import async_mock_service - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -40,7 +36,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -300,9 +296,3 @@ def mac_address_fixture() -> Mock: """Patch getmac.get_mac_address.""" with patch("getmac.get_mac_address", return_value=None) as mac: yield mac - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") diff --git a/tests/components/samsungtv/test_device_trigger.py b/tests/components/samsungtv/test_device_trigger.py index e16ea718cbb..acc7ecb904d 100644 --- a/tests/components/samsungtv/test_device_trigger.py +++ b/tests/components/samsungtv/test_device_trigger.py 
@@ -45,7 +45,9 @@ async def test_get_triggers( @pytest.mark.usefixtures("remoteencws", "rest_api") async def test_if_fires_on_turn_on_request( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -95,11 +97,11 @@ async def test_if_fires_on_turn_on_request( ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert calls[1].data["some"] == entity_id - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == entity_id + assert service_calls[2].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/samsungtv/test_diagnostics.py b/tests/components/samsungtv/test_diagnostics.py index 7b20002ae5b..b1bdf034bc1 100644 --- a/tests/components/samsungtv/test_diagnostics.py +++ b/tests/components/samsungtv/test_diagnostics.py @@ -16,6 +16,7 @@ from .const import ( SAMPLE_DEVICE_INFO_WIFI, ) +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -29,6 +30,7 @@ async def test_entry_diagnostics( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -43,6 +45,7 @@ async def test_entry_diagnostics( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -65,6 +68,7 @@ async def test_entry_diagnostics_encrypted( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -80,6 +84,7 @@ async def test_entry_diagnostics_encrypted( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, @@ -102,6 +107,7 @@ async def test_entry_diagnostics_encrypte_offline( assert await get_diagnostics_for_config_entry(hass, hass_client, config_entry) == { "entry": { + "created_at": ANY, "data": { "host": "fake_host", "ip_address": "test", @@ -116,6 +122,7 @@ async def test_entry_diagnostics_encrypte_offline( "domain": "samsungtv", "entry_id": "123456", "minor_version": 2, + "modified_at": ANY, "options": {}, "pref_disable_new_entities": False, "pref_disable_polling": False, diff --git a/tests/components/samsungtv/test_trigger.py b/tests/components/samsungtv/test_trigger.py index 6607c60b8e8..8076ceb2807 100644 --- a/tests/components/samsungtv/test_trigger.py +++ b/tests/components/samsungtv/test_trigger.py @@ -21,7 +21,7 @@ from tests.common import MockEntity, MockEntityPlatform @pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_device_id( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_domain: str, ) -> None: @@ -60,14 +60,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) 
== 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() # Ensure WOL backup is called when trigger not present with patch( @@ -78,14 +78,14 @@ async def test_turn_on_trigger_device_id( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 mock_send_magic_packet.assert_called() @pytest.mark.usefixtures("remoteencws", "rest_api") @pytest.mark.parametrize("entity_domain", ["media_player", "remote"]) async def test_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall], entity_domain: str + hass: HomeAssistant, service_calls: list[ServiceCall], entity_domain: str ) -> None: """Test for turn_on triggers by entity_id firing.""" await setup_samsungtv_entry(hass, MOCK_ENTRYDATA_ENCRYPTED_WS) @@ -119,9 +119,9 @@ async def test_turn_on_trigger_entity_id( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == entity_id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == entity_id + assert service_calls[1].data["id"] == 0 @pytest.mark.usefixtures("remoteencws", "rest_api") diff --git a/tests/components/sanix/conftest.py b/tests/components/sanix/conftest.py index 86eaa870770..405cad8b60b 100644 --- a/tests/components/sanix/conftest.py +++ b/tests/components/sanix/conftest.py @@ -1,5 +1,6 @@ """Sanix tests configuration.""" +from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, patch from zoneinfo import ZoneInfo @@ -16,7 +17,6 @@ from sanix import ( ATTR_API_TIME, ) from sanix.models import Measurement -from typing_extensions import Generator from homeassistant.components.sanix.const import CONF_SERIAL_NUMBER, DOMAIN from homeassistant.const import CONF_TOKEN diff --git a/tests/components/schedule/test_init.py b/tests/components/schedule/test_init.py index c43b2500ccb..7cd59f19033 100644 --- a/tests/components/schedule/test_init.py +++ b/tests/components/schedule/test_init.py @@ -31,11 +31,12 @@ from homeassistant.const import ( CONF_ICON, CONF_ID, CONF_NAME, + EVENT_STATE_CHANGED, SERVICE_RELOAD, STATE_OFF, STATE_ON, ) -from homeassistant.core import EVENT_STATE_CHANGED, Context, HomeAssistant +from homeassistant.core import Context, HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component diff --git a/tests/components/schlage/conftest.py b/tests/components/schlage/conftest.py index dcb6bc52a7b..9d61bb877d9 100644 --- a/tests/components/schlage/conftest.py +++ b/tests/components/schlage/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Schlage tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, create_autospec, patch from pyschlage.lock import Lock import pytest -from typing_extensions import Generator from homeassistant.components.schlage.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/scrape/conftest.py b/tests/components/scrape/conftest.py index f6109dbc19a..5b84f4fd44a 100644 --- a/tests/components/scrape/conftest.py +++ b/tests/components/scrape/conftest.py @@ -2,12 +2,12 @@ from 
__future__ import annotations +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, patch import uuid import pytest -from typing_extensions import Generator from homeassistant.components.rest.data import DEFAULT_TIMEOUT from homeassistant.components.rest.schema import DEFAULT_METHOD, DEFAULT_VERIFY_SSL diff --git a/tests/components/screenlogic/__init__.py b/tests/components/screenlogic/__init__.py index 9c8a21b1ba4..169c1f28900 100644 --- a/tests/components/screenlogic/__init__.py +++ b/tests/components/screenlogic/__init__.py @@ -20,7 +20,7 @@ GATEWAY_IMPORT_PATH = "homeassistant.components.screenlogic.ScreenLogicGateway" GATEWAY_DISCOVERY_IMPORT_PATH = "homeassistant.components.screenlogic.coordinator.async_discover_gateways_by_unique_id" -def num_key_string_to_int(data: dict) -> None: +def num_key_string_to_int(data: dict) -> dict: """Convert all string number dict keys to integer. This needed for screenlogicpy's data dict format. diff --git a/tests/components/screenlogic/test_diagnostics.py b/tests/components/screenlogic/test_diagnostics.py index c6d6ea60e87..77e1ce58dad 100644 --- a/tests/components/screenlogic/test_diagnostics.py +++ b/tests/components/screenlogic/test_diagnostics.py @@ -4,6 +4,7 @@ from unittest.mock import DEFAULT, patch from screenlogicpy import ScreenLogicGateway from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -56,4 +57,4 @@ async def test_diagnostics( hass, hass_client, mock_config_entry ) - assert diag == snapshot + assert diag == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/screenlogic/test_services.py b/tests/components/screenlogic/test_services.py index d175ea27c84..0fc79fad0e5 100644 --- a/tests/components/screenlogic/test_services.py +++ b/tests/components/screenlogic/test_services.py @@ -1,12 +1,12 @@ """Tests for ScreenLogic integration service calls.""" +from collections.abc import AsyncGenerator from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest from screenlogicpy import ScreenLogicGateway from screenlogicpy.device_const.system import COLOR_MODE -from typing_extensions import AsyncGenerator from homeassistant.components.screenlogic import DOMAIN from homeassistant.components.screenlogic.const import ( diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index b956aa588cb..aef22b93bcf 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -74,7 +74,7 @@ async def test_confirmable_notification( "message": "Throw ring in mountain?", "confirm_action": [ { - "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", "target": {"entity_id": "mount.doom"}, } ], diff --git a/tests/components/script/test_init.py b/tests/components/script/test_init.py index 2352e9c64e6..a5eda3757a9 100644 --- a/tests/components/script/test_init.py +++ b/tests/components/script/test_init.py @@ -3,7 +3,7 @@ import asyncio from datetime import timedelta from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import pytest @@ -29,8 +29,8 @@ from homeassistant.core import ( callback, split_entity_id, ) -from homeassistant.exceptions import ServiceNotFound -from homeassistant.helpers import device_registry as dr, entity_registry as er, template +from 
homeassistant.exceptions import ServiceNotFound, TemplateError +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.event import async_track_state_change from homeassistant.helpers.script import ( SCRIPT_MODE_CHOICES, @@ -47,11 +47,13 @@ import homeassistant.util.dt as dt_util from tests.common import ( MockConfigEntry, + MockUser, async_fire_time_changed, async_mock_service, mock_restore_cache, ) from tests.components.logbook.common import MockRow, mock_humanify +from tests.components.repairs import get_repairs from tests.typing import WebSocketGenerator ENTITY_ID = "script.test" @@ -83,7 +85,7 @@ async def test_passing_variables(hass: HomeAssistant) -> None: "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } @@ -113,8 +115,14 @@ async def test_passing_variables(hass: HomeAssistant) -> None: @pytest.mark.parametrize("toggle", [False, True]) -async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: - """Verify turn_on, turn_off & toggle services.""" +@pytest.mark.parametrize("action_schema_variations", ["action", "service"]) +async def test_turn_on_off_toggle( + hass: HomeAssistant, toggle: bool, action_schema_variations: str +) -> None: + """Verify turn_on, turn_off & toggle services. + + Ensures backward compatibility with the old service action schema is maintained. + """ event = "test_event" event_mock = Mock() @@ -130,9 +138,15 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: async_track_state_change(hass, ENTITY_ID, state_listener, to_state="on") if toggle: - turn_off_step = {"service": "script.toggle", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.toggle", + "entity_id": ENTITY_ID, + } else: - turn_off_step = {"service": "script.turn_off", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.turn_off", + "entity_id": ENTITY_ID, + } assert await async_setup_component( hass, "script", @@ -163,7 +177,7 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: invalid_configs = [ {"test": {}}, {"test hello world": {"sequence": [{"event": "bla"}]}}, - {"test": {"sequence": {"event": "test_event", "service": "homeassistant.turn_on"}}}, + {"test": {"sequence": {"event": "test_event", "action": "homeassistant.turn_on"}}}, ] @@ -178,7 +192,7 @@ invalid_configs = [ "test": { "sequence": { "event": "test_event", - "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", } } }, @@ -233,7 +247,7 @@ async def test_bad_config_validation_critical( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -252,13 +266,14 @@ async def test_bad_config_validation_critical( @pytest.mark.parametrize( - ("object_id", "broken_config", "problem", "details"), + ("object_id", "broken_config", "problem", "details", "issue"), [ ( "bad_script", {}, "could not be validated", "required key not provided @ data['sequence']", + "validation_failed_schema", ), ( "bad_script", @@ -270,18 +285,22 @@ async def test_bad_config_validation_critical( "state": "blah", }, }, - "failed to setup actions", + "failed to setup sequence", "Unknown entity registry entry abcdabcdabcdabcdabcdabcdabcdabcd.", + "validation_failed_sequence", ), ], ) async def test_bad_config_validation( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: 
pytest.LogCaptureFixture, + hass_admin_user: MockUser, object_id, broken_config, problem, details, + issue, ) -> None: """Test bad script configuration which can be detected during validation.""" assert await async_setup_component( @@ -293,7 +312,7 @@ async def test_bad_config_validation( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -301,11 +320,22 @@ async def test_bad_config_validation( }, ) - # Check we get the expected error message + # Check we get the expected error message and issue assert ( f"Script with alias 'bad_script' {problem} and has been disabled: {details}" in caplog.text ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + assert issues[0]["issue_id"] == f"script.bad_script_{issue}" + assert issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/bad_script", + "entity_id": "script.bad_script", + "error": ANY, + "name": "bad_script", + } + assert issues[0]["translation_placeholders"]["error"].startswith(details) # Make sure both scripts are setup assert set(hass.states.async_entity_ids("script")) == { @@ -315,6 +345,31 @@ async def test_bad_config_validation( # The script failing validation should be unavailable assert hass.states.get("script.bad_script").state == STATE_UNAVAILABLE + # Reloading the automation with fixed config should clear the issue + with patch( + "homeassistant.config.load_yaml_config_file", + autospec=True, + return_value={ + script.DOMAIN: { + object_id: { + "alias": "bad_script", + "sequence": { + "action": "test.automation", + "entity_id": "hello.world", + }, + }, + } + }, + ): + await hass.services.async_call( + script.DOMAIN, + SERVICE_RELOAD, + context=Context(user_id=hass_admin_user.id), + blocking=True, + ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 0 + @pytest.mark.parametrize("running", ["no", "same", "different"]) async def test_reload_service(hass: HomeAssistant, running) -> None: @@ -387,7 +442,7 @@ async def test_reload_unchanged_does_not_stop( "sequence": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.script"}, + {"action": "test.script"}, ], } } @@ -430,13 +485,13 @@ async def test_reload_unchanged_does_not_stop( [ { "test": { - "sequence": [{"service": "test.script"}], + "sequence": [{"action": "test.script"}], } }, # A script using templates { "test": { - "sequence": [{"service": "{{ 'test.script' }}"}], + "sequence": [{"action": "{{ 'test.script' }}"}], } }, # A script using blueprint @@ -623,7 +678,7 @@ async def test_logging_script_error( assert await async_setup_component( hass, "script", - {"script": {"hello": {"sequence": [{"service": "non.existing"}]}}}, + {"script": {"hello": {"sequence": [{"action": "non.existing"}]}}}, ) with pytest.raises(ServiceNotFound) as err: await hass.services.async_call("script", "hello", blocking=True) @@ -647,7 +702,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: """Test async_set_service_schema for the script integration.""" script_config = { DOMAIN: { - "test1": {"sequence": [{"service": "homeassistant.restart"}]}, + "test1": {"sequence": [{"action": "homeassistant.restart"}]}, "test2": { "description": "test2", "fields": { @@ -656,7 +711,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: "example": "param_example", } }, - "sequence": 
[{"service": "homeassistant.restart"}], + "sequence": [{"action": "homeassistant.restart"}], }, } } @@ -752,11 +807,11 @@ async def test_extraction_functions( "test1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -766,15 +821,15 @@ async def test_extraction_functions( "device_id": device_in_both.id, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, ] @@ -782,7 +837,7 @@ async def test_extraction_functions( "test2": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -808,7 +863,7 @@ async def test_extraction_functions( "test3": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -818,27 +873,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -985,11 +1040,11 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: """Test calling script concurrently or not.""" if concurrently: call_script_2 = { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script2"}, } else: - call_script_2 = {"service": "script.script2"} + call_script_2 = {"action": "script.script2"} assert await async_setup_component( hass, "script", @@ -1002,17 +1057,17 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: { "wait_template": "{{ is_state('input_boolean.test1', 'on') }}" }, - {"service": "test.script", "data": {"value": "script1"}}, + {"action": "test.script", "data": {"value": "script1"}}, ], }, "script2": { "mode": "parallel", "sequence": [ - {"service": "test.script", "data": {"value": "script2a"}}, + {"action": "test.script", "data": {"value": "script2a"}}, { "wait_template": "{{ is_state('input_boolean.test2', 'on') }}" }, - {"service": "test.script", "data": {"value": "script2b"}}, + {"action": "test.script", "data": {"value": "script2b"}}, ], }, } @@ -1083,7 +1138,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", "templated_config_var": "{{ templated_config_var }}", @@ -1099,7 +1154,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1112,7 +1167,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1154,7 +1209,7 @@ async def test_script_variables( assert mock_calls[2].data["value"] 
== "from_service" assert "Error rendering variables" not in caplog.text - with pytest.raises(template.TemplateError): + with pytest.raises(TemplateError): await hass.services.async_call("script", "script3", blocking=True) assert "Error rendering variables" in caplog.text assert len(mock_calls) == 3 @@ -1178,7 +1233,7 @@ async def test_script_this_var_always( "script1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "this_template": "{{this.entity_id}}", }, @@ -1263,8 +1318,8 @@ async def test_recursive_script( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + {"action": "test.script"}, ], }, } @@ -1313,26 +1368,26 @@ async def test_recursive_script_indirect( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script2"}, + {"action": "script.script2"}, ], }, "script2": { "mode": script_mode, "sequence": [ - {"service": "script.script3"}, + {"action": "script.script3"}, ], }, "script3": { "mode": script_mode, "sequence": [ - {"service": "script.script4"}, + {"action": "script.script4"}, ], }, "script4": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + {"action": "test.script"}, ], }, } @@ -1397,10 +1452,10 @@ async def test_recursive_script_turn_on( "condition": "template", "value_template": "{{ request == 'step_2' }}", }, - "sequence": {"service": "test.script_done"}, + "sequence": {"action": "test.script_done"}, }, "default": { - "service": "script.turn_on", + "action": "script.turn_on", "data": { "entity_id": "script.script1", "variables": {"request": "step_2"}, @@ -1408,7 +1463,7 @@ async def test_recursive_script_turn_on( }, }, { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script1"}, }, ], @@ -1470,7 +1525,7 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "sequence": [{"service": "light.turn_on"}], + "sequence": [{"action": "light.turn_on"}], } assert await async_setup_component( hass, @@ -1534,7 +1589,7 @@ async def test_script_service_changed_entity_id( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "{{ this.entity_id }}"}, } } @@ -1563,9 +1618,7 @@ async def test_script_service_changed_entity_id( assert calls[1].data["entity_id"] == "script.custom_entity_id_2" -async def test_blueprint_automation( - hass: HomeAssistant, calls: list[ServiceCall] -) -> None: +async def test_blueprint_script(hass: HomeAssistant, calls: list[ServiceCall]) -> None: """Test blueprint script.""" assert await async_setup_component( hass, @@ -1617,12 +1670,13 @@ async def test_blueprint_automation( "a_number": 5, }, "Blueprint 'Call service' generated invalid script", - "value should be a string for dictionary value @ data['sequence'][0]['service']", + "value should be a string for dictionary value @ data['sequence'][0]['action']", ), ], ) async def test_blueprint_script_bad_config( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, blueprint_inputs, problem, @@ -1646,9 +1700,24 @@ async def test_blueprint_script_bad_config( assert problem in caplog.text assert details in caplog.text + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + issue = "validation_failed_blueprint" + assert issues[0]["issue_id"] == f"script.test_script_{issue}" + assert 
issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/test_script", + "entity_id": "script.test_script", + "error": ANY, + "name": "test_script", + } + assert issues[0]["translation_placeholders"]["error"].startswith(details) + async def test_blueprint_script_fails_substitution( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, ) -> None: """Test blueprint script with bad inputs.""" with patch( @@ -1677,6 +1746,18 @@ async def test_blueprint_script_fails_substitution( in caplog.text ) + issues = await get_repairs(hass, hass_ws_client) + assert len(issues) == 1 + issue = "validation_failed_blueprint" + assert issues[0]["issue_id"] == f"script.test_script_{issue}" + assert issues[0]["translation_key"] == issue + assert issues[0]["translation_placeholders"] == { + "edit": "/config/script/edit/test_script", + "entity_id": "script.test_script", + "error": "No substitution found for input blah", + "name": "test_script", + } + @pytest.mark.parametrize("response", [{"value": 5}, '{"value": 5}']) async def test_responses(hass: HomeAssistant, response: Any) -> None: @@ -1770,10 +1851,10 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "sequence": [ { "parallel": [ - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, ] } ] @@ -1781,7 +1862,7 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "test_sub": { "mode": "queued", "sequence": [ - {"service": "test.simulated_remote"}, + {"action": "test.simulated_remote"}, ], }, } diff --git a/tests/components/script/test_recorder.py b/tests/components/script/test_recorder.py index ca915cede6f..6358093014a 100644 --- a/tests/components/script/test_recorder.py +++ b/tests/components/script/test_recorder.py @@ -52,7 +52,7 @@ async def test_exclude_attributes( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } diff --git a/tests/components/search/test_init.py b/tests/components/search/test_init.py index a817fbfc39e..9b2b959e0dd 100644 --- a/tests/components/search/test_init.py +++ b/tests/components/search/test_init.py @@ -534,12 +534,14 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.wled_device") == { ItemType.AREA: {living_room_area.id}, ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.AUTOMATION, "automation.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -561,6 +563,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.AUTOMATION, "automation.scene") == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -574,6 +577,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: 
{"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.AUTOMATION, "automation.script") == { @@ -589,6 +593,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -611,6 +616,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -624,6 +630,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -639,6 +646,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.LABEL: {label_christmas.label_id}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, @@ -652,6 +660,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.device", "script.hue"}, } @@ -664,6 +673,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {"scene.scene_wled_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.wled"}, } @@ -673,6 +683,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.FLOOR: {second_floor.floor_id}, ItemType.GROUP: {"group.wled", "group.wled_hue"}, + ItemType.INTEGRATION: {"wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert search(ItemType.ENTITY, hue_segment_1_entity.entity_id) == { @@ -681,6 +692,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.LABEL: {label_energy.label_id}, ItemType.SCENE: {"scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {"script.hue"}, @@ -691,6 +703,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, ItemType.GROUP: {"group.hue", "group.wled_hue"}, + ItemType.INTEGRATION: {"hue"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } assert not search(ItemType.ENTITY, "automation.wled") @@ -722,6 +735,7 @@ async def test_search( } assert search(ItemType.ENTITY, "light.wled_config_entry_source") == { ItemType.CONFIG_ENTRY: {wled_config_entry.entry_id}, + ItemType.INTEGRATION: {"wled"}, } assert not search(ItemType.FLOOR, "unknown") @@ -780,6 +794,7 @@ async def test_search( wled_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.GROUP, "group.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -790,6 +805,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, 
ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.GROUP, "group.wled_hue") == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -803,6 +819,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCRIPT: {"script.group"}, } @@ -841,6 +858,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCENE, "scene.scene_hue_seg_1") == { ItemType.AREA: {kitchen_area.id}, @@ -848,6 +866,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCENE, scene_wled_hue_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, living_room_area.id, kitchen_area.id}, @@ -861,6 +880,7 @@ async def test_search( hue_segment_2_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -880,6 +900,7 @@ async def test_search( ItemType.DEVICE: {wled_device.id}, ItemType.ENTITY: {wled_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"wled"}, } assert search(ItemType.SCRIPT, "script.hue") == { ItemType.AREA: {kitchen_area.id}, @@ -887,6 +908,7 @@ async def test_search( ItemType.DEVICE: {hue_device.id}, ItemType.ENTITY: {hue_segment_1_entity.entity_id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.script_with_templated_services") == {} assert search(ItemType.SCRIPT, "script.device") == { @@ -894,6 +916,7 @@ async def test_search( ItemType.CONFIG_ENTRY: {hue_config_entry.entry_id}, ItemType.DEVICE: {hue_device.id}, ItemType.FLOOR: {first_floor.floor_id}, + ItemType.INTEGRATION: {"hue"}, } assert search(ItemType.SCRIPT, "script.floor") == { ItemType.FLOOR: {first_floor.floor_id}, @@ -915,6 +938,7 @@ async def test_search( }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, ItemType.GROUP: {"group.wled_hue"}, + ItemType.INTEGRATION: {"hue", "wled"}, } assert search(ItemType.SCRIPT, script_scene_entity.entity_id) == { ItemType.AREA: {bedroom_area.id, kitchen_area.id, living_room_area.id}, @@ -928,6 +952,7 @@ async def test_search( scene_wled_hue_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.LABEL: {label_other.label_id}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, } @@ -944,6 +969,7 @@ async def test_search( script_scene_entity.entity_id, }, ItemType.FLOOR: {first_floor.floor_id, second_floor.floor_id}, + ItemType.INTEGRATION: {"hue", "wled"}, ItemType.SCENE: {scene_wled_hue_entity.entity_id}, ItemType.SCRIPT: {script_scene_entity.entity_id}, } @@ -981,6 +1007,7 @@ async def test_search( ), ItemType.CONFIG_ENTRY: [hue_config_entry.entry_id], ItemType.FLOOR: [first_floor.floor_id], + ItemType.INTEGRATION: ["hue"], ItemType.SCENE: unordered( ["scene.scene_hue_seg_1", scene_wled_hue_entity.entity_id] ), diff --git a/tests/components/season/conftest.py b/tests/components/season/conftest.py index a45a2078d9b..c7458b0a2e1 100644 --- 
a/tests/components/season/conftest.py +++ b/tests/components/season/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.season.const import DOMAIN, TYPE_ASTRONOMICAL from homeassistant.const import CONF_TYPE diff --git a/tests/components/season/test_sensor.py b/tests/components/season/test_sensor.py index ffc8e9f1a07..881192c95f0 100644 --- a/tests/components/season/test_sensor.py +++ b/tests/components/season/test_sensor.py @@ -70,6 +70,7 @@ def idfn(val): """Provide IDs for pytest parametrize.""" if isinstance(val, (datetime)): return val.strftime("%Y%m%d") + return None @pytest.mark.parametrize(("type", "day", "expected"), NORTHERN_PARAMETERS, ids=idfn) diff --git a/tests/components/select/test_device_condition.py b/tests/components/select/test_device_condition.py index e60df688658..fc35757fa67 100644 --- a/tests/components/select/test_device_condition.py +++ b/tests/components/select/test_device_condition.py @@ -21,17 +21,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_conditions( @@ -115,7 +105,7 @@ async def test_get_conditions_hidden_auxiliary( async def test_if_selected_option( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -181,7 +171,7 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set( entry.entity_id, "option1", {"options": ["option1", "option2"]} @@ -189,8 +179,8 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["result"] == "option1 - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["result"] == "option1 - event - test_event1" hass.states.async_set( entry.entity_id, "option2", {"options": ["option1", "option2"]} @@ -198,13 +188,13 @@ async def test_if_selected_option( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["result"] == "option2 - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["result"] == "option2 - event - test_event2" async def test_if_selected_option_legacy( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, ) -> None: @@ -252,8 +242,8 @@ async def test_if_selected_option_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["result"] == "option1 - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["result"] == "option1 - event - test_event1" async def 
test_get_condition_capabilities( diff --git a/tests/components/select/test_device_trigger.py b/tests/components/select/test_device_trigger.py index c7a55c56202..dbb4e23d785 100644 --- a/tests/components/select/test_device_trigger.py +++ b/tests/components/select/test_device_trigger.py @@ -21,17 +21,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -117,7 +107,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -210,27 +200,27 @@ async def test_if_fires_on_state_change( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) # Test triggering device trigger with a from state hass.states.async_set(entry.entity_id, "option3") await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"from - device - {entry.entity_id} - option2 - option3 - None - 0" ) # Test triggering device trigger with both a from and to state hass.states.async_set(entry.entity_id, "option1") await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 assert ( - calls[2].data["some"] + service_calls[2].data["some"] == f"from-to - device - {entry.entity_id} - option3 - option1 - None - 0" ) @@ -239,7 +229,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -289,9 +279,9 @@ async def test_if_fires_on_state_change_legacy( # Test triggering device trigger with a to state hass.states.async_set(entry.entity_id, "option2") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"to - device - {entry.entity_id} - option1 - option2 - None - 0" ) diff --git a/tests/components/sensibo/snapshots/test_diagnostics.ambr b/tests/components/sensibo/snapshots/test_diagnostics.ambr index c911a7629be..a33209f7c88 100644 --- a/tests/components/sensibo/snapshots/test_diagnostics.ambr +++ b/tests/components/sensibo/snapshots/test_diagnostics.ambr @@ -1,246 +1,5 @@ # serializer version: 1 # name: test_diagnostics - dict({ - 'modes': dict({ - 'auto': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 
'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'cool': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'dry': dict({ - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 64, - 66, - 68, - ]), - }), - }), - }), - 'fan': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - }), - }), - 'heat': dict({ - 'fanLevels': list([ - 'quiet', - 'low', - 'medium', - ]), - 'horizontalSwing': list([ - 'stopped', - 'fixedLeft', - 'fixedCenterLeft', - ]), - 'light': list([ - 'on', - 'off', - ]), - 'swing': list([ - 'stopped', - 'fixedTop', - 'fixedMiddleTop', - ]), - 'temperatures': dict({ - 'C': dict({ - 'isNative': True, - 'values': list([ - 10, - 16, - 17, - 18, - 19, - 20, - ]), - }), - 'F': dict({ - 'isNative': False, - 'values': list([ - 63, - 64, - 66, - ]), - }), - }), - }), - }), - }) -# --- -# name: test_diagnostics.1 - dict({ - 'low': 'low', - 'medium': 'medium', - 'quiet': 'quiet', - }) -# --- -# name: test_diagnostics.2 - dict({ - 'fixedmiddletop': 'fixedMiddleTop', - 'fixedtop': 'fixedTop', - 'stopped': 'stopped', - }) -# --- -# name: test_diagnostics.3 - dict({ - 'fixedcenterleft': 'fixedCenterLeft', - 'fixedleft': 'fixedLeft', - 'stopped': 'stopped', - }) -# --- -# name: test_diagnostics.4 - dict({ - 'fanlevel': 'low', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'heat', - 'on': True, - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', - }) -# --- -# name: test_diagnostics.5 - dict({ - 'fanlevel': 'high', - 'horizontalswing': 'stopped', - 'light': 'on', - 'mode': 'cool', - 'on': True, - 'swing': 'stopped', - 'targettemperature': 21, - 'temperatureunit': 'c', - }) -# --- -# name: test_diagnostics.6 - dict({ - }) -# --- -# name: test_diagnostics[full_snapshot] dict({ 'AAZZAAZZ': dict({ 'ac_states': dict({ diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index 6b4aedab828..b5a7be7bde0 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -400,6 +400,10 @@ async def test_climate_temperatures( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": {"status": "Success"}}, ), + pytest.raises( + ServiceValidationError, + match="Provided temperature 24.0 is not valid. 
Accepted range is 10 to 20", + ), ): await hass.services.async_call( CLIMATE_DOMAIN, @@ -410,7 +414,7 @@ async def test_climate_temperatures( await hass.async_block_till_done() state2 = hass.states.get("climate.hallway") - assert state2.attributes["temperature"] == 20 + assert state2.attributes["temperature"] == 19 with ( patch( diff --git a/tests/components/sensibo/test_diagnostics.py b/tests/components/sensibo/test_diagnostics.py index 1fe72cca0f3..0dc1f2c25e9 100644 --- a/tests/components/sensibo/test_diagnostics.py +++ b/tests/components/sensibo/test_diagnostics.py @@ -3,6 +3,7 @@ from __future__ import annotations from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -10,8 +11,6 @@ from homeassistant.core import HomeAssistant from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator -EXCLUDE_ATTRIBUTES = {"full_features"} - async def test_diagnostics( hass: HomeAssistant, @@ -24,16 +23,6 @@ async def test_diagnostics( diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) - assert diag["ABC999111"]["full_capabilities"] == snapshot - assert diag["ABC999111"]["fan_modes_translated"] == snapshot - assert diag["ABC999111"]["swing_modes_translated"] == snapshot - assert diag["ABC999111"]["horizontal_swing_modes_translated"] == snapshot - assert diag["ABC999111"]["smart_low_state"] == snapshot - assert diag["ABC999111"]["smart_high_state"] == snapshot - assert diag["ABC999111"]["pure_conf"] == snapshot - - def limit_attrs(prop, path): - exclude_attrs = EXCLUDE_ATTRIBUTES - return prop in exclude_attrs - - assert diag == snapshot(name="full_snapshot", exclude=limit_attrs) + assert diag == snapshot( + exclude=props("full_features", "created_at", "modified_at"), + ) diff --git a/tests/components/sensor/test_device_condition.py b/tests/components/sensor/test_device_condition.py index 3bc9a660e93..d9a9900b8b1 100644 --- a/tests/components/sensor/test_device_condition.py +++ b/tests/components/sensor/test_device_condition.py @@ -27,7 +27,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -37,12 +36,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( "device_class", [ @@ -470,7 +463,6 @@ async def test_if_state_not_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for bad value conditions.""" @@ -513,7 +505,7 @@ async def test_if_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -559,22 +551,22 @@ async def test_if_state_above( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert 
len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -582,7 +574,7 @@ async def test_if_state_above_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -628,22 +620,22 @@ async def test_if_state_above_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -651,7 +643,7 @@ async def test_if_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -697,22 +689,22 @@ async def test_if_state_below( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -720,7 +712,7 @@ async def test_if_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -767,30 +759,30 @@ async def test_if_state_between( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 
hass.states.async_set(entry.entity_id, 11) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "event - test_event1" hass.states.async_set(entry.entity_id, 21) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entry.entity_id, 19) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "event - test_event1" diff --git a/tests/components/sensor/test_device_trigger.py b/tests/components/sensor/test_device_trigger.py index 87a6d9929c3..bb560c824d3 100644 --- a/tests/components/sensor/test_device_trigger.py +++ b/tests/components/sensor/test_device_trigger.py @@ -31,7 +31,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -41,12 +40,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.mark.parametrize( "device_class", [ @@ -427,7 +420,6 @@ async def test_if_fires_not_on_above_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, ) -> None: """Test for value triggers firing.""" @@ -467,7 +459,7 @@ async def test_if_fires_on_state_above( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -513,17 +505,18 @@ async def test_if_fires_on_state_above( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) @@ -532,7 +525,7 @@ async def test_if_fires_on_state_below( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -578,17 +571,18 @@ async def test_if_fires_on_state_below( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - 
calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 11 - 9 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 11 - 9 - None" ) @@ -597,7 +591,7 @@ async def test_if_fires_on_state_between( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -644,28 +638,30 @@ async def test_if_fires_on_state_between( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) hass.states.async_set(entry.entity_id, 21) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 hass.states.async_set(entry.entity_id, 19) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] == f"bat_low device - {entry.entity_id} - 21 - 19 - None" + service_calls[1].data["some"] + == f"bat_low device - {entry.entity_id} - 21 - 19 - None" ) @@ -674,7 +670,7 @@ async def test_if_fires_on_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for value triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -720,17 +716,18 @@ async def test_if_fires_on_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 9) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] == f"bat_low device - {entry.entity_id} - 9 - 11 - None" + service_calls[0].data["some"] + == f"bat_low device - {entry.entity_id} - 9 - 11 - None" ) @@ -739,7 +736,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -786,17 +783,17 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, 10) hass.states.async_set(entry.entity_id, 11) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - 10 - 11 - 0:00:05" ) diff --git 
a/tests/components/sensor/test_init.py b/tests/components/sensor/test_init.py index 126e327f364..2504ea80d84 100644 --- a/tests/components/sensor/test_init.py +++ b/tests/components/sensor/test_init.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator from datetime import UTC, date, datetime from decimal import Decimal from types import ModuleType from typing import Any import pytest -from typing_extensions import Generator from homeassistant.components import sensor from homeassistant.components.number import NumberDeviceClass @@ -418,7 +418,7 @@ async def test_restore_sensor_save_state( assert state["entity_id"] == entity0.entity_id extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] assert extra_data == expected_extra_data - assert type(extra_data["native_value"]) == native_value_type + assert type(extra_data["native_value"]) is native_value_type @pytest.mark.parametrize( @@ -479,7 +479,7 @@ async def test_restore_sensor_restore_state( assert hass.states.get(entity0.entity_id) assert entity0.native_value == native_value - assert type(entity0.native_value) == native_value_type + assert type(entity0.native_value) is native_value_type assert entity0.native_unit_of_measurement == uom @@ -942,7 +942,21 @@ async def test_custom_unit_change( "1000000", "1093613", SensorDeviceClass.DISTANCE, - ) + ), + # Volume Storage (subclass of Volume) + ( + US_CUSTOMARY_SYSTEM, + UnitOfVolume.LITERS, + UnitOfVolume.GALLONS, + UnitOfVolume.GALLONS, + UnitOfVolume.FLUID_OUNCES, + 1000, + "1000", + "264", + "264", + "33814", + SensorDeviceClass.VOLUME_STORAGE, + ), ], ) async def test_unit_conversion_priority( diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 62cb66d2053..27fab9c0b3b 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta import math from statistics import mean -from typing import Literal +from typing import Any, Literal from unittest.mock import patch from freezegun import freeze_time @@ -50,9 +50,14 @@ from tests.components.recorder.common import ( async_recorder_block_till_done, async_wait_recording_done, do_adhoc_statistics, + get_start_time, statistics_during_period, ) -from tests.typing import RecorderInstanceGenerator, WebSocketGenerator +from tests.typing import ( + MockHAClientWebSocket, + RecorderInstanceGenerator, + WebSocketGenerator, +) BATTERY_SENSOR_ATTRIBUTES = { "device_class": "battery", @@ -95,7 +100,7 @@ KW_SENSOR_ATTRIBUTES = { @pytest.fixture async def mock_recorder_before_hass( - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, ) -> None: """Set up recorder patches.""" @@ -116,6 +121,33 @@ async def async_list_statistic_ids( ) +async def assert_statistic_ids( + hass: HomeAssistant, + expected_result: list[dict[str, Any]], +) -> None: + """Assert statistic ids.""" + with session_scope(hass=hass, read_only=True) as session: + db_states = list(session.query(StatisticsMeta)) + assert len(db_states) == len(expected_result) + for i, db_state in enumerate(db_states): + assert db_state.statistic_id == expected_result[i]["statistic_id"] + assert ( + db_state.unit_of_measurement + == expected_result[i]["unit_of_measurement"] + ) + + +async def assert_validation_result( + client: MockHAClientWebSocket, + expected_result: dict[str, list[dict[str, Any]]], +) -> None: + """Assert statistics validation result.""" + 
await client.send_json_auto_id({"type": "recorder/validate_statistics"}) + response = await client.receive_json() + assert response["success"] + assert response["result"] == expected_result + + @pytest.mark.parametrize( ( "device_class", @@ -163,7 +195,7 @@ async def test_compile_hourly_statistics( max, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -247,7 +279,7 @@ async def test_compile_hourly_statistics_with_some_same_last_updated( If the last updated value is the same we will have a zero duration. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -361,7 +393,7 @@ async def test_compile_hourly_statistics_with_all_same_last_updated( If the last updated value is the same we will have a zero duration. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -467,7 +499,7 @@ async def test_compile_hourly_statistics_only_state_is_and_end_of_period( max, ) -> None: """Test compiling hourly statistics when the only state at end of period.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -561,7 +593,7 @@ async def test_compile_hourly_statistics_purged_state_changes( unit_class, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -632,7 +664,7 @@ async def test_compile_hourly_statistics_wrong_unit( attributes, ) -> None: """Test compiling hourly statistics for sensor with unit not matching device class.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -856,7 +888,7 @@ async def test_compile_hourly_sum_statistics_amount( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1040,7 +1072,7 @@ async def test_compile_hourly_sum_statistics_amount_reset_every_state_change( factor, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1163,7 +1195,7 @@ async def test_compile_hourly_sum_statistics_amount_invalid_last_reset( factor, ) -> None: """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await 
async_recorder_block_till_done(hass) @@ -1263,7 +1295,7 @@ async def test_compile_hourly_sum_statistics_nan_inf_state( factor, ) -> None: """Test compiling hourly statistics with nan and inf states.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -1398,7 +1430,7 @@ async def test_compile_hourly_sum_statistics_negative_state( offset, ) -> None: """Test compiling hourly statistics with negative states.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) hass.data.pop(loader.DATA_CUSTOM_COMPONENTS) mocksensor = MockSensor(name="custom_sensor") @@ -1406,10 +1438,11 @@ async def test_compile_hourly_sum_statistics_negative_state( setup_test_component_platform(hass, DOMAIN, [mocksensor], built_in=False) await async_setup_component(hass, "homeassistant", {}) - await async_setup_component( - hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} - ) - await hass.async_block_till_done() + with freeze_time(zero) as freezer: + await async_setup_component( + hass, "sensor", {"sensor": [{"platform": "demo"}, {"platform": "test"}]} + ) + await hass.async_block_till_done() attributes = { "device_class": device_class, "state_class": state_class, @@ -1510,7 +1543,7 @@ async def test_compile_hourly_sum_statistics_total_no_reset( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1623,7 +1656,7 @@ async def test_compile_hourly_sum_statistics_total_increasing( factor, ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1736,7 +1769,7 @@ async def test_compile_hourly_sum_statistics_total_increasing_small_dip( factor, ) -> None: """Test small dips in sensor readings do not trigger a reset.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1838,7 +1871,7 @@ async def test_compile_hourly_energy_statistics_unsupported( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -1942,7 +1975,7 @@ async def test_compile_hourly_energy_statistics_multiple( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling multiple hourly statistics.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period2 = period0 + timedelta(minutes=10) period2_end = period0 + timedelta(minutes=15) @@ -2156,7 +2189,7 @@ async def test_compile_hourly_statistics_unchanged( value, ) -> None: """Test compiling hourly statistics, with no changes during the hour.""" - zero = dt_util.utcnow() + zero = 
get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2199,7 +2232,7 @@ async def test_compile_hourly_statistics_partially_unavailable( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics, with the sensor being partially unavailable.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2268,7 +2301,7 @@ async def test_compile_hourly_statistics_unavailable( sensor.test1 is unavailable and should not have statistics generated sensor.test2 should have statistics generated """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2315,7 +2348,7 @@ async def test_compile_hourly_statistics_fails( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test compiling hourly statistics throws.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2433,30 +2466,29 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( - "_attributes", + "energy_attributes", [{**ENERGY_SENSOR_ATTRIBUTES, "last_reset": 0}, TEMPERATURE_SENSOR_ATTRIBUTES], ) async def test_list_statistic_ids_unsupported( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - _attributes, + energy_attributes: dict[str, Any], ) -> None: """Test listing future statistic ids for unsupported sensor.""" await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) - attributes = dict(_attributes) + attributes = dict(energy_attributes) hass.states.async_set("sensor.test1", 0, attributes=attributes) if "last_reset" in attributes: attributes.pop("unit_of_measurement") hass.states.async_set("last_reset.test2", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) if "unit_of_measurement" in attributes: attributes["unit_of_measurement"] = "invalid" hass.states.async_set("sensor.test3", 0, attributes=attributes) attributes.pop("unit_of_measurement") hass.states.async_set("sensor.test4", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) attributes["state_class"] = "invalid" hass.states.async_set("sensor.test5", 0, attributes=attributes) attributes.pop("state_class") @@ -2492,7 +2524,7 @@ async def test_compile_hourly_statistics_changing_units_1( This tests the case where the recorder cannot convert between the units. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2621,7 +2653,7 @@ async def test_compile_hourly_statistics_changing_units_2( This tests the behaviour when the sensor units are note supported by any unit converter. 
""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) - timedelta(seconds=30 * 5) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2700,7 +2732,7 @@ async def test_compile_hourly_statistics_changing_units_3( This tests the behaviour when the sensor units are note supported by any unit converter. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2821,7 +2853,7 @@ async def test_compile_hourly_statistics_convert_units_1( This tests the case where the recorder can convert between the units. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -2980,7 +3012,7 @@ async def test_compile_hourly_statistics_equivalent_units_1( max, ) -> None: """Test compiling hourly statistics where units change from one hour to the next.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3105,7 +3137,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( max, ) -> None: """Test compiling hourly statistics where units change during an hour.""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3129,7 +3161,7 @@ async def test_compile_hourly_statistics_equivalent_units_2( ) assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5)) + do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 10)) await async_wait_recording_done(hass) assert "The unit of sensor.test1 is changing" not in caplog.text assert "and matches the unit of already compiled statistics" not in caplog.text @@ -3151,9 +3183,9 @@ async def test_compile_hourly_statistics_equivalent_units_2( "sensor.test1": [ { "start": process_timestamp( - zero + timedelta(seconds=30 * 5) + zero + timedelta(seconds=30 * 10) ).timestamp(), - "end": process_timestamp(zero + timedelta(seconds=30 * 15)).timestamp(), + "end": process_timestamp(zero + timedelta(seconds=30 * 20)).timestamp(), "mean": pytest.approx(mean), "min": pytest.approx(min), "max": pytest.approx(max), @@ -3198,7 +3230,7 @@ async def test_compile_hourly_statistics_changing_device_class_1( Device class is ignored, meaning changing device class should not influence the statistics. """ - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3409,7 +3441,7 @@ async def test_compile_hourly_statistics_changing_device_class_2( Device class is ignored, meaning changing device class should not influence the statistics. 
""" - zero = dt_util.utcnow() + zero = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) @@ -3547,7 +3579,7 @@ async def test_compile_hourly_statistics_changing_state_class( max, ) -> None: """Test compiling hourly statistics where state class changes.""" - period0 = dt_util.utcnow() + period0 = get_start_time(dt_util.utcnow()) period0_end = period1 = period0 + timedelta(minutes=5) period1_end = period0 + timedelta(minutes=10) await async_setup_component(hass, "sensor", {}) @@ -4117,7 +4149,7 @@ async def async_record_states( one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=10 * 5) three = two + timedelta(seconds=40 * 5) - four = three + timedelta(seconds=10 * 5) + four = three + timedelta(seconds=9 * 5) states = {entity_id: []} freezer.move_to(one) @@ -4178,22 +4210,8 @@ async def test_validate_unit_change_convertible( The test also asserts that the sensor's device class is ignored. """ - msg_id = 1 - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4205,14 +4223,20 @@ async def test_validate_unit_change_convertible( # No statistics, unit in state matching device class - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) # No statistics, unit in state not matching device class - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4221,7 +4245,10 @@ async def test_validate_unit_change_convertible( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4241,7 +4268,10 @@ async def test_validate_unit_change_convertible( # Valid state - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4253,7 +4283,10 @@ async def test_validate_unit_change_convertible( # Valid state in compatible unit - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) 
await assert_validation_result(client, {}) @@ -4292,22 +4325,7 @@ async def test_validate_statistics_unit_ignore_device_class( The test asserts that the sensor's device class is ignored. """ - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4319,7 +4337,9 @@ async def test_validate_statistics_unit_ignore_device_class( # No statistics, no device class - empty response initial_attributes = {"state_class": "measurement", "unit_of_measurement": "dogs"} - hass.states.async_set("sensor.test", 10, attributes=initial_attributes) + hass.states.async_set( + "sensor.test", 10, attributes=initial_attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4327,7 +4347,10 @@ async def test_validate_statistics_unit_ignore_device_class( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4384,24 +4407,10 @@ async def test_validate_statistics_unit_change_no_device_class( conversion, and the unit is then changed to a unit which can and cannot be converted to the original unit. 
""" - msg_id = 1 attributes = dict(attributes) attributes.pop("device_class") - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4413,14 +4422,20 @@ async def test_validate_statistics_unit_change_no_device_class( # No statistics, sensor state set - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) # No statistics, sensor state set to an incompatible unit - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4429,7 +4444,10 @@ async def test_validate_statistics_unit_change_no_device_class( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": "dogs"} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": "dogs"}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4449,7 +4467,10 @@ async def test_validate_statistics_unit_change_no_device_class( # Valid state - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4461,7 +4482,10 @@ async def test_validate_statistics_unit_change_no_device_class( # Valid state in compatible unit - empty response hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4498,22 +4522,7 @@ async def test_validate_statistics_unsupported_state_class( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4524,7 +4533,9 @@ async def test_validate_statistics_unsupported_state_class( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + 
"sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4536,7 +4547,9 @@ async def test_validate_statistics_unsupported_state_class( # State update with invalid state class, expect error _attributes = dict(attributes) _attributes.pop("state_class") - hass.states.async_set("sensor.test", 12, attributes=_attributes) + hass.states.async_set( + "sensor.test", 12, attributes=_attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() expected = { "sensor.test": [ @@ -4566,22 +4579,7 @@ async def test_validate_statistics_sensor_no_longer_recorded( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4592,7 +4590,9 @@ async def test_validate_statistics_sensor_no_longer_recorded( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4633,22 +4633,7 @@ async def test_validate_statistics_sensor_not_recorded( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4673,7 +4658,9 @@ async def test_validate_statistics_sensor_not_recorded( "entity_filter", return_value=False, ): - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + ) await hass.async_block_till_done() await assert_validation_result(client, expected) @@ -4697,22 +4684,7 @@ async def test_validate_statistics_sensor_removed( unit, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) hass.config.units = units await async_setup_component(hass, "sensor", {}) @@ -4723,7 +4695,9 @@ async def test_validate_statistics_sensor_removed( await assert_validation_result(client, {}) # No statistics, valid state - empty response - hass.states.async_set("sensor.test", 10, attributes=attributes) + hass.states.async_set( + "sensor.test", 10, attributes=attributes, timestamp=now.timestamp() + 
) await hass.async_block_till_done() await assert_validation_result(client, {}) @@ -4760,33 +4734,7 @@ async def test_validate_statistics_unit_change_no_conversion( unit2, ) -> None: """Test validate_statistics.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -4797,13 +4745,19 @@ async def test_validate_statistics_unit_change_no_conversion( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) # No statistics, changed unit - empty response hass.states.async_set( - "sensor.test", 11, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 11, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4811,11 +4765,14 @@ async def test_validate_statistics_unit_change_no_conversion( await async_recorder_block_till_done(hass) do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) - await assert_statistic_ids([]) + await assert_statistic_ids(hass, []) # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4824,13 +4781,16 @@ async def test_validate_statistics_unit_change_no_conversion( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) await assert_validation_result(client, {}) # Change unit - expect error hass.states.async_set( - "sensor.test", 13, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 13, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) expected = { @@ -4850,7 +4810,10 @@ async def test_validate_statistics_unit_change_no_conversion( # Original unit - empty response hass.states.async_set( - "sensor.test", 14, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 14, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await async_recorder_block_till_done(hass) await assert_validation_result(client, {}) @@ -4894,33 +4857,7 
@@ async def test_validate_statistics_unit_change_equivalent_units( This tests no validation issue is created when a sensor's unit changes to an equivalent unit. """ - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -4931,7 +4868,10 @@ async def test_validate_statistics_unit_change_equivalent_units( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -4940,12 +4880,15 @@ async def test_validate_statistics_unit_change_equivalent_units( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit - empty response hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp() + 1, ) await assert_validation_result(client, {}) @@ -4954,7 +4897,7 @@ async def test_validate_statistics_unit_change_equivalent_units( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit2}] ) await assert_validation_result(client, {}) @@ -4978,34 +4921,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( This tests a validation issue is created when a sensor's unit changes to an equivalent unit which is not known to the unit converters. 
""" - - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - - async def assert_statistic_ids(expected_result): - with session_scope(hass=hass, read_only=True) as session: - db_states = list(session.query(StatisticsMeta)) - assert len(db_states) == len(expected_result) - for i, db_state in enumerate(db_states): - assert db_state.statistic_id == expected_result[i]["statistic_id"] - assert ( - db_state.unit_of_measurement - == expected_result[i]["unit_of_measurement"] - ) - - now = dt_util.utcnow() + now = get_start_time(dt_util.utcnow()) await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) @@ -5016,7 +4932,10 @@ async def test_validate_statistics_unit_change_equivalent_units_2( # No statistics, original unit - empty response hass.states.async_set( - "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit1} + "sensor.test", + 10, + attributes={**attributes, "unit_of_measurement": unit1}, + timestamp=now.timestamp(), ) await assert_validation_result(client, {}) @@ -5025,12 +4944,15 @@ async def test_validate_statistics_unit_change_equivalent_units_2( do_adhoc_statistics(hass, start=now) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) # Units changed to an equivalent unit which is not known by the unit converters hass.states.async_set( - "sensor.test", 12, attributes={**attributes, "unit_of_measurement": unit2} + "sensor.test", + 12, + attributes={**attributes, "unit_of_measurement": unit2}, + timestamp=now.timestamp(), ) expected = { "sensor.test": [ @@ -5052,7 +4974,7 @@ async def test_validate_statistics_unit_change_equivalent_units_2( do_adhoc_statistics(hass, start=now + timedelta(hours=1)) await async_recorder_block_till_done(hass) await assert_statistic_ids( - [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] + hass, [{"statistic_id": "sensor.test", "unit_of_measurement": unit1}] ) await assert_validation_result(client, expected) @@ -5061,21 +4983,6 @@ async def test_validate_statistics_other_domain( hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test sensor does not raise issues for statistics for other domains.""" - msg_id = 1 - - def next_id(): - nonlocal msg_id - msg_id += 1 - return msg_id - - async def assert_validation_result(client, expected_result): - await client.send_json( - {"id": next_id(), "type": "recorder/validate_statistics"} - ) - response = await client.receive_json() - assert response["success"] - assert response["result"] == expected_result - await async_setup_component(hass, "sensor", {}) await async_recorder_block_till_done(hass) client = await hass_ws_client() @@ -5211,7 +5118,7 @@ async def async_record_states_partially_unavailable(hass, zero, entity_id, attri one = zero + timedelta(seconds=1 * 5) two = one + timedelta(seconds=15 * 5) three = two + timedelta(seconds=30 * 5) - four = three + timedelta(seconds=15 * 5) + four = three + timedelta(seconds=14 * 5) states = {entity_id: []} with freeze_time(one) as freezer: diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py 
index 1ab4eed11ee..e2493319b69 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -1,10 +1,10 @@ """Configuration for 17Track tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch -from py17track.package import Package +from pyseventeentrack.package import Package import pytest -from typing_extensions import Generator from homeassistant.components.seventeentrack.const import ( CONF_SHOW_ARCHIVED, diff --git a/tests/components/seventeentrack/snapshots/test_services.ambr b/tests/components/seventeentrack/snapshots/test_services.ambr index 185a1d44fe0..202c5a3d667 100644 --- a/tests/components/seventeentrack/snapshots/test_services.ambr +++ b/tests/components/seventeentrack/snapshots/test_services.ambr @@ -3,27 +3,39 @@ dict({ 'packages': list([ dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 3', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Expired', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '123', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'In Transit', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Delivered', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), ]), @@ -33,19 +45,27 @@ dict({ 'packages': list([ dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 1', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'In Transit', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '456', }), dict({ + 'destination_country': 'Belgium', 'friendly_name': 'friendly name 2', 'info_text': 'info text 1', 'location': 'location 1', + 'origin_country': 'Belgium', + 'package_type': 'Registered Parcel', 'status': 'Delivered', 'timestamp': datetime.datetime(2020, 8, 10, 10, 32, tzinfo=), + 'tracking_info_language': 'Unknown', 'tracking_number': '789', }), ]), diff --git a/tests/components/seventeentrack/test_config_flow.py b/tests/components/seventeentrack/test_config_flow.py index 380146ed276..0a7c4ca918c 100644 --- a/tests/components/seventeentrack/test_config_flow.py +++ b/tests/components/seventeentrack/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from py17track.errors import SeventeenTrackError +from pyseventeentrack.errors import SeventeenTrackError import pytest from homeassistant import config_entries diff --git a/tests/components/seventeentrack/test_repairs.py b/tests/components/seventeentrack/test_repairs.py new file mode 100644 index 00000000000..0f697c1ad49 --- /dev/null +++ b/tests/components/seventeentrack/test_repairs.py @@ -0,0 +1,95 @@ +"""Tests for the seventeentrack repair flow.""" + +from http import HTTPStatus +from unittest.mock import AsyncMock + +from 
freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN +from homeassistant.components.repairs.websocket_api import RepairsFlowIndexView +from homeassistant.components.seventeentrack import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import goto_future, init_integration +from .conftest import DEFAULT_SUMMARY_LENGTH, get_package + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_repair( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + issue_registry: ir.IssueRegistry, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Ensure everything starts correctly.""" + await init_integration(hass, mock_config_entry) # 2 + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + assert len(issue_registry.issues) == 1 + + package = get_package() + mock_seventeentrack.return_value.profile.packages.return_value = [package] + await goto_future(hass, freezer) + + assert hass.states.get("sensor.17track_package_friendly_name_1") + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH + 1 + + assert "deprecated" not in mock_config_entry.data + + repair_issue = issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"deprecate_sensor_{mock_config_entry.entry_id}" + ) + + assert await async_setup_component(hass, REPAIRS_DOMAIN, {REPAIRS_DOMAIN: {}}) + + client = await hass_client() + + resp = await client.post( + RepairsFlowIndexView.url, + json={"handler": DOMAIN, "issue_id": repair_issue.issue_id}, + ) + + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "form", + "flow_id": flow_id, + "handler": DOMAIN, + "step_id": "confirm", + "data_schema": [], + "errors": None, + "description_placeholders": None, + "last_step": None, + "preview": None, + } + + resp = await client.post(RepairsFlowIndexView.url + f"/{flow_id}") + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + assert data == { + "type": "create_entry", + "handler": DOMAIN, + "flow_id": flow_id, + "description": None, + "description_placeholders": None, + } + + assert mock_config_entry.data["deprecated"] + + repair_issue = issue_registry.async_get_issue( + domain=DOMAIN, issue_id="deprecate_sensor" + ) + + assert repair_issue is None + + await goto_future(hass, freezer) + assert len(hass.states.async_entity_ids()) == DEFAULT_SUMMARY_LENGTH diff --git a/tests/components/seventeentrack/test_sensor.py b/tests/components/seventeentrack/test_sensor.py index 75cc6435073..ca16fc64833 100644 --- a/tests/components/seventeentrack/test_sensor.py +++ b/tests/components/seventeentrack/test_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory -from py17track.errors import SeventeenTrackError +from pyseventeentrack.errors import SeventeenTrackError from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -317,4 +317,4 @@ async def test_full_valid_platform_config( assert await async_setup_component(hass, "sensor", VALID_PLATFORM_CONFIG_FULL) await hass.async_block_till_done() assert len(hass.states.async_entity_ids()) == len(DEFAULT_SUMMARY.keys()) 
- assert len(issue_registry.issues) == 1 + assert len(issue_registry.issues) == 2 diff --git a/tests/components/sfr_box/conftest.py b/tests/components/sfr_box/conftest.py index e86cd06650e..7c1f8bbab5c 100644 --- a/tests/components/sfr_box/conftest.py +++ b/tests/components/sfr_box/conftest.py @@ -1,11 +1,11 @@ """Provide common SFR Box fixtures.""" +from collections.abc import Generator import json from unittest.mock import AsyncMock, patch import pytest from sfrbox_api.models import DslInfo, FtthInfo, SystemInfo, WanInfo -from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntry diff --git a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr index f14ec98a418..0023f65c90e 100644 --- a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , @@ -149,6 +150,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_button.ambr b/tests/components/sfr_box/snapshots/test_button.ambr index eee419bf373..df097b58c51 100644 --- a/tests/components/sfr_box/snapshots/test_button.ambr +++ b/tests/components/sfr_box/snapshots/test_button.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_sensor.ambr b/tests/components/sfr_box/snapshots/test_sensor.ambr index 649c94c89dc..46b22448d25 100644 --- a/tests/components/sfr_box/snapshots/test_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_sensor.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', + 'model_id': None, 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/test_binary_sensor.py b/tests/components/sfr_box/test_binary_sensor.py index 8dba537f6cb..6152f8e2721 100644 --- a/tests/components/sfr_box/test_binary_sensor.py +++ b/tests/components/sfr_box/test_binary_sensor.py @@ -1,11 +1,11 @@ """Test the SFR Box binary sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.models import SystemInfo from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/sfr_box/test_button.py b/tests/components/sfr_box/test_button.py index 4f20a2f34a3..f555ccebbf9 100644 --- a/tests/components/sfr_box/test_button.py +++ b/tests/components/sfr_box/test_button.py @@ -1,11 +1,11 @@ """Test the SFR Box buttons.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxError from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/sfr_box/test_diagnostics.py b/tests/components/sfr_box/test_diagnostics.py index 597631d12f1..d31d97cbcf8 
100644 --- a/tests/components/sfr_box/test_diagnostics.py +++ b/tests/components/sfr_box/test_diagnostics.py @@ -1,11 +1,11 @@ """Test the SFR Box diagnostics.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.models import SystemInfo from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant diff --git a/tests/components/sfr_box/test_init.py b/tests/components/sfr_box/test_init.py index 14688009c5c..19e15491be1 100644 --- a/tests/components/sfr_box/test_init.py +++ b/tests/components/sfr_box/test_init.py @@ -1,10 +1,10 @@ """Test the SFR Box setup process.""" +from collections.abc import Generator from unittest.mock import patch import pytest from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError -from typing_extensions import Generator from homeassistant.components.sfr_box.const import DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigEntryState diff --git a/tests/components/sfr_box/test_sensor.py b/tests/components/sfr_box/test_sensor.py index 506e1ed8962..dd4a67b42f6 100644 --- a/tests/components/sfr_box/test_sensor.py +++ b/tests/components/sfr_box/test_sensor.py @@ -1,10 +1,10 @@ """Test the SFR Box sensors.""" +from collections.abc import Generator from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform diff --git a/tests/components/shelly/__init__.py b/tests/components/shelly/__init__.py index 4631a17969e..7de45eeee98 100644 --- a/tests/components/shelly/__init__.py +++ b/tests/components/shelly/__init__.py @@ -23,6 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, + DeviceEntry, DeviceRegistry, format_mac, ) @@ -111,6 +112,7 @@ def register_entity( unique_id: str, config_entry: ConfigEntry | None = None, capabilities: Mapping[str, Any] | None = None, + device_id: str | None = None, ) -> str: """Register enabled entity, return entity_id.""" entity_registry = er.async_get(hass) @@ -122,6 +124,7 @@ def register_entity( disabled_by=None, config_entry=config_entry, capabilities=capabilities, + device_id=device_id, ) return f"{domain}.{object_id}" @@ -145,9 +148,11 @@ def get_entity_state(hass: HomeAssistant, entity_id: str) -> str: return entity.state -def register_device(device_registry: DeviceRegistry, config_entry: ConfigEntry) -> None: +def register_device( + device_registry: DeviceRegistry, config_entry: ConfigEntry +) -> DeviceEntry: """Register Shelly device.""" - device_registry.async_get_or_create( + return device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(CONNECTION_NETWORK_MAC, format_mac(MOCK_MAC))}, ) diff --git a/tests/components/shelly/bluetooth/test_scanner.py b/tests/components/shelly/bluetooth/test_scanner.py index c7bbb5cb708..1076691a768 100644 --- a/tests/components/shelly/bluetooth/test_scanner.py +++ b/tests/components/shelly/bluetooth/test_scanner.py @@ -12,7 +12,9 @@ from homeassistant.core import HomeAssistant from .. 
import init_integration, inject_rpc_device_event -async def test_scanner_v1(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: +async def test_scanner_v1( + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch +) -> None: """Test injecting data into the scanner v1.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -50,7 +52,9 @@ async def test_scanner_v1(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> assert ble_device is None -async def test_scanner_v2(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> None: +async def test_scanner_v2( + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch +) -> None: """Test injecting data into the scanner v2.""" await init_integration( hass, 2, options={CONF_BLE_SCANNER_MODE: BLEScannerMode.ACTIVE} @@ -93,7 +97,7 @@ async def test_scanner_v2(hass: HomeAssistant, mock_rpc_device, monkeypatch) -> async def test_scanner_ignores_non_ble_events( - hass: HomeAssistant, mock_rpc_device, monkeypatch + hass: HomeAssistant, mock_rpc_device, monkeypatch: pytest.MonkeyPatch ) -> None: """Test injecting non ble data into the scanner.""" await init_integration( @@ -119,7 +123,10 @@ async def test_scanner_ignores_non_ble_events( async def test_scanner_ignores_wrong_version_and_logs( - hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mock_rpc_device, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Test injecting wrong version of ble data into the scanner.""" await init_integration( @@ -152,7 +159,10 @@ async def test_scanner_ignores_wrong_version_and_logs( async def test_scanner_warns_on_corrupt_event( - hass: HomeAssistant, mock_rpc_device, monkeypatch, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + mock_rpc_device, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, ) -> None: """Test injecting garbage ble data into the scanner.""" await init_integration( diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 7caaae8621e..a2629d21362 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -11,11 +11,11 @@ from homeassistant.components.shelly.const import ( EVENT_SHELLY_CLICK, REST_SENSORS_UPDATE_INTERVAL, ) -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from . 
import MOCK_MAC -from tests.common import async_capture_events, async_mock_service +from tests.common import async_capture_events MOCK_SETTINGS = { "name": "Test name", @@ -292,12 +292,6 @@ def mock_ws_server(): yield -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture def events(hass: HomeAssistant): """Yield caught shelly_click events.""" diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index dc68b657796..18f65deb907 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -1,5 +1,6 @@ """Tests for Shelly binary sensor platform.""" +from copy import deepcopy from unittest.mock import Mock from aioshelly.const import MODEL_MOTION @@ -10,6 +11,7 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.shelly.const import UPDATE_PERIOD_MULTIPLIER from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -354,3 +356,104 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( await hass.async_block_till_done() assert hass.states.get(entity_id).state == STATE_OFF + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual binary sensor", "binary_sensor.test_name_virtual_binary_sensor"), + (None, "binary_sensor.test_name_boolean_203"), + ], +) +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual binary sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:203"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-boolean:203-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:203"], "value", False) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + +async def test_rpc_remove_virtual_binary_sensor_when_mode_toggle( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual binary sensor will be removed if the mode has been changed to a toggle.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "toggle"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) 
+ entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_binary_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual binary sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + BINARY_SENSOR_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index a3040fc2eb8..0c574a33e0c 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1305,3 +1305,22 @@ async def test_reconfigure_with_exception( ) assert result["errors"] == {"base": base_error} + + +async def test_zeroconf_rejects_ipv6(hass: HomeAssistant) -> None: + """Test zeroconf discovery rejects ipv6.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), + ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], + hostname="mock_hostname", + name="shelly1pm-12345", + port=None, + properties={zeroconf.ATTR_PROPERTIES_ID: "shelly1pm-12345"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ipv6_not_supported" diff --git a/tests/components/shelly/test_device_trigger.py b/tests/components/shelly/test_device_trigger.py index d47cca17460..fb68393304b 100644 --- a/tests/components/shelly/test_device_trigger.py +++ b/tests/components/shelly/test_device_trigger.py @@ -178,7 +178,7 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_click_event_block_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, ) -> None: """Test for click_event trigger firing for block device.""" @@ -215,14 +215,14 @@ async def test_if_fires_on_click_event_block_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_click" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_click" async def test_if_fires_on_click_event_rpc_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, ) -> None: """Test for click_event trigger firing for rpc device.""" @@ -259,14 +259,14 @@ async def test_if_fires_on_click_event_rpc_device( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == 
"test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_block_device_not_ready( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -304,14 +304,14 @@ async def test_validate_trigger_block_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_click" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_click" async def test_validate_trigger_rpc_device_not_ready( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -349,8 +349,8 @@ async def test_validate_trigger_rpc_device_not_ready( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_validate_trigger_invalid_triggers( @@ -391,7 +391,7 @@ async def test_validate_trigger_invalid_triggers( async def test_rpc_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -429,14 +429,14 @@ async def test_rpc_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single_push" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single_push" async def test_block_no_runtime_data( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_block_device: Mock, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -474,5 +474,5 @@ async def test_block_no_runtime_data( hass.bus.async_fire(EVENT_SHELLY_CLICK, message) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_single" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_single" diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index ff453b3251c..73f432094b9 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -1,18 +1,24 @@ """Tests for Shelly number platform.""" +from copy import deepcopy from unittest.mock import AsyncMock, Mock from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError import pytest from homeassistant.components.number import ( + ATTR_MAX, + ATTR_MIN, + ATTR_MODE, + ATTR_STEP, ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, + NumberMode, ) from homeassistant.components.shelly.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError from 
homeassistant.helpers.device_registry import DeviceRegistry @@ -240,3 +246,145 @@ async def test_block_set_value_auth_error( assert "context" in flow assert flow["context"].get("source") == SOURCE_REAUTH assert flow["context"].get("entry_id") == entry.entry_id + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit", "view", "mode"), + [ + ( + "Virtual number", + "number.test_name_virtual_number", + "%", + "%", + "field", + NumberMode.BOX, + ), + (None, "number.test_name_number_203", "", None, "field", NumberMode.BOX), + ( + "Virtual slider", + "number.test_name_virtual_slider", + "Hz", + "Hz", + "slider", + NumberMode.SLIDER, + ), + ], +) +async def test_rpc_device_virtual_number( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, + view: str, + mode: NumberMode, +) -> None: + """Test a virtual number for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": view}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 12.3} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.3" + assert state.attributes.get(ATTR_MIN) == 0 + assert state.attributes.get(ATTR_MAX) == 100 + assert state.attributes.get(ATTR_STEP) == 0.1 + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + assert state.attributes.get(ATTR_MODE) is mode + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 78.9) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "78.9" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 56.7}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_virtual_number_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual number will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": -1000, + "max": 1000, + "meta": {"ui": {"step": 1, "unit": "", "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 123} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def 
test_rpc_remove_virtual_number_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + NUMBER_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_select.py b/tests/components/shelly/test_select.py new file mode 100644 index 00000000000..0a6eb2a5843 --- /dev/null +++ b/tests/components/shelly/test_select.py @@ -0,0 +1,151 @@ +"""Tests for Shelly select platform.""" + +from copy import deepcopy +from unittest.mock import Mock + +import pytest + +from homeassistant.components.select import ( + ATTR_OPTION, + ATTR_OPTIONS, + DOMAIN as SELECT_PLATFORM, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry + +from . import init_integration, register_device, register_entity + + +@pytest.mark.parametrize( + ("name", "entity_id", "value", "expected_state"), + [ + ("Virtual enum", "select.test_name_virtual_enum", "option 1", "Title 1"), + (None, "select.test_name_enum_203", None, STATE_UNKNOWN), + ], +) +async def test_rpc_device_virtual_enum( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + value: str | None, + expected_state: str, +) -> None: + """Test a virtual enum for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["enum:203"] = { + "name": name, + "options": ["option 1", "option 2", "option 3"], + "meta": { + "ui": { + "view": "dropdown", + "titles": {"option 1": "Title 1", "option 2": None}, + } + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:203"] = {"value": value} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state + assert state.attributes.get(ATTR_OPTIONS) == [ + "Title 1", + "option 2", + "option 3", + ] + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-enum:203-enum" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 2") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "option 2" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "option 1") + await hass.services.async_call( + SELECT_PLATFORM, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: "Title 1"}, + blocking=True, + ) + # 'Title 1' corresponds to 'option 1' + assert mock_rpc_device.call_rpc.call_args[0][1] == {"id": 203, "value": "option 1"} + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "Title 1" + + +async def test_rpc_remove_virtual_enum_when_mode_label( + hass: HomeAssistant, + 
entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual enum will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["enum:200"] = { + "name": None, + "options": ["one", "two"], + "meta": { + "ui": {"view": "label", "titles": {"one": "Title 1", "two": "Title 2"}} + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:200"] = {"value": "one"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SELECT_PLATFORM, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_enum_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual enum will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SELECT_PLATFORM, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 9f510ba8fe9..a39123a6722 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.homeassistant import ( SERVICE_UPDATE_ENTITY, ) from homeassistant.components.sensor import ( + ATTR_OPTIONS, ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -856,3 +857,335 @@ async def test_rpc_disabled_xfreq( entry = entity_registry.async_get(entity_id) assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual sensor", "sensor.test_name_virtual_sensor"), + (None, "sensor.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert 
hass.states.get(entity_id).state == "dolor sit amet" + + +async def test_rpc_remove_text_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "field"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_text_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id", "original_unit", "expected_unit"), + [ + ("Virtual number sensor", "sensor.test_name_virtual_number_sensor", "W", "W"), + (None, "sensor.test_name_number_203", "", None), + ], +) +async def test_rpc_device_virtual_number_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + original_unit: str, + expected_unit: str | None, +) -> None: + """Test a virtual number sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["number:203"] = { + "name": name, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 0.1, "unit": original_unit, "view": "label"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:203"] = {"value": 34.5} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "34.5" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-number:203-number" + + monkeypatch.setitem(mock_rpc_device.status["number:203"], "value", 56.7) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "56.7" + + +async def test_rpc_remove_number_virtual_sensor_when_mode_field( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: 
pytest.MonkeyPatch, +) -> None: + """Test if the virtual number sensor will be removed if the mode has been changed to a field.""" + config = deepcopy(mock_rpc_device.config) + config["number:200"] = { + "name": None, + "min": 0, + "max": 100, + "meta": {"ui": {"step": 1, "unit": "", "view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["number:200"] = {"value": 67.8} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_number_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual number sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_number_200", + "number:200-number", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +@pytest.mark.parametrize( + ("name", "entity_id", "value", "expected_state"), + [ + ( + "Virtual enum sensor", + "sensor.test_name_virtual_enum_sensor", + "one", + "Title 1", + ), + (None, "sensor.test_name_enum_203", None, STATE_UNKNOWN), + ], +) +async def test_rpc_device_virtual_enum_sensor( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, + value: str | None, + expected_state: str, +) -> None: + """Test a virtual enum sensor for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["enum:203"] = { + "name": name, + "options": ["one", "two", "three"], + "meta": {"ui": {"view": "label", "titles": {"one": "Title 1", "two": None}}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:203"] = {"value": value} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state + assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENUM + assert state.attributes.get(ATTR_OPTIONS) == ["Title 1", "two", "three"] + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-enum:203-enum" + + monkeypatch.setitem(mock_rpc_device.status["enum:203"], "value", "two") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "two" + + +async def test_rpc_remove_enum_virtual_sensor_when_mode_dropdown( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual enum sensor will be removed 
if the mode has been changed to a dropdown.""" + config = deepcopy(mock_rpc_device.config) + config["enum:200"] = { + "name": None, + "options": ["option 1", "option 2", "option 3"], + "meta": { + "ui": { + "view": "dropdown", + "titles": {"option 1": "Title 1", "option 2": None}, + } + }, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["enum:200"] = {"value": "option 2"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_enum_virtual_sensor_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual enum sensor will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SENSOR_DOMAIN, + "test_name_enum_200", + "enum:200-enum", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 637a92a7fbe..124562be8d5 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -25,6 +25,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceRegistry from homeassistant.helpers.entity_registry import EntityRegistry @@ -187,7 +188,7 @@ async def test_block_device_unique_ids( async def test_block_set_state_connection_error( - hass: HomeAssistant, mock_block_device, monkeypatch + hass: HomeAssistant, mock_block_device, monkeypatch: pytest.MonkeyPatch ) -> None: """Test block device set state connection error.""" monkeypatch.setattr( @@ -430,3 +431,142 @@ async def test_wall_display_relay_mode( entry = entity_registry.async_get(switch_entity_id) assert entry assert entry.unique_id == "123456789ABC-switch:0" + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual switch", "switch.test_name_virtual_switch"), + (None, "switch.test_name_boolean_200"), + ], +) +async def test_rpc_device_virtual_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual switch for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = { + "name": name, + "meta": {"ui": {"view": "toggle"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await 
init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_ON + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-boolean:200-boolean" + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", False) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_OFF + + monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == STATE_ON + + +async def test_rpc_device_virtual_binary_sensor( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test that a switch entity has not been created for a virtual binary sensor.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + entity_id = "switch.test_name_boolean_200" + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert not state + + +async def test_rpc_remove_virtual_switch_when_mode_label( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual switch will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["boolean:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["boolean:200"] = {"value": True} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_switch_when_orphaned( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual switch will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + SWITCH_DOMAIN, + "test_name_boolean_200", + "boolean:200-boolean", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shelly/test_text.py b/tests/components/shelly/test_text.py new file mode 100644 index 00000000000..19acb856f35 --- /dev/null +++ 
b/tests/components/shelly/test_text.py @@ -0,0 +1,129 @@ +"""Tests for Shelly text platform.""" + +from copy import deepcopy +from unittest.mock import Mock + +import pytest + +from homeassistant.components.text import ( + ATTR_VALUE, + DOMAIN as TEXT_PLATFORM, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.entity_registry import EntityRegistry + +from . import init_integration, register_device, register_entity + + +@pytest.mark.parametrize( + ("name", "entity_id"), + [ + ("Virtual text", "text.test_name_virtual_text"), + (None, "text.test_name_text_203"), + ], +) +async def test_rpc_device_virtual_text( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + name: str | None, + entity_id: str, +) -> None: + """Test a virtual text for RPC device.""" + config = deepcopy(mock_rpc_device.config) + config["text:203"] = { + "name": name, + "meta": {"ui": {"view": "field"}}, + } + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:203"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 3) + + state = hass.states.get(entity_id) + assert state + assert state.state == "lorem ipsum" + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == "123456789ABC-text:203-text" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "dolor sit amet") + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "dolor sit amet" + + monkeypatch.setitem(mock_rpc_device.status["text:203"], "value", "sed do eiusmod") + await hass.services.async_call( + TEXT_PLATFORM, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: "sed do eiusmod"}, + blocking=True, + ) + mock_rpc_device.mock_update() + assert hass.states.get(entity_id).state == "sed do eiusmod" + + +async def test_rpc_remove_virtual_text_when_mode_label( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test if the virtual text will be removed if the mode has been changed to a label.""" + config = deepcopy(mock_rpc_device.config) + config["text:200"] = {"name": None, "meta": {"ui": {"view": "label"}}} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status["text:200"] = {"value": "lorem ipsum"} + monkeypatch.setattr(mock_rpc_device, "status", status) + + config_entry = await init_integration(hass, 3, skip_setup=True) + device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry + + +async def test_rpc_remove_virtual_text_when_orphaned( + hass: HomeAssistant, + entity_registry: EntityRegistry, + device_registry: DeviceRegistry, + mock_rpc_device: Mock, +) -> None: + """Check whether the virtual text will be removed if it has been removed from the device configuration.""" + config_entry = await init_integration(hass, 3, skip_setup=True) + 
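# Pre-register the text entity; the device config provides no "text:200", so setup should remove the orphaned registry entry + 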
device_entry = register_device(device_registry, config_entry) + entity_id = register_entity( + hass, + TEXT_PLATFORM, + "test_name_text_200", + "text:200-text", + config_entry, + device_id=device_entry.id, + ) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + entry = entity_registry.async_get(entity_id) + assert not entry diff --git a/tests/components/shopping_list/test_todo.py b/tests/components/shopping_list/test_todo.py index 173544d0be2..c54a6abfd6f 100644 --- a/tests/components/shopping_list/test_todo.py +++ b/tests/components/shopping_list/test_todo.py @@ -1,11 +1,18 @@ """Test shopping list todo platform.""" -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from typing import Any import pytest -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN +from homeassistant.components.todo import ( + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -13,11 +20,12 @@ from tests.typing import WebSocketGenerator TEST_ENTITY = "todo.shopping_list" +type WsGetItemsType = Callable[[], Coroutine[Any, Any, list[dict[str, str]]]] +type WsMoveItemType = Callable[[str, str | None], Coroutine[Any, Any, dict[str, Any]]] + @pytest.fixture -async def ws_get_items( - hass_ws_client: WebSocketGenerator, -) -> Callable[[], Awaitable[dict[str, str]]]: +async def ws_get_items(hass_ws_client: WebSocketGenerator) -> WsGetItemsType: """Fixture to fetch items from the todo websocket.""" async def get() -> list[dict[str, str]]: @@ -37,9 +45,7 @@ async def ws_get_items( @pytest.fixture -async def ws_move_item( - hass_ws_client: WebSocketGenerator, -) -> Callable[[str, str | None], Awaitable[None]]: +async def ws_move_item(hass_ws_client: WebSocketGenerator) -> WsMoveItemType: """Fixture to move an item in the todo list.""" async def move(uid: str, previous_uid: str | None) -> dict[str, Any]: @@ -62,7 +68,7 @@ async def test_get_items( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test creating a shopping list item with the WS API and verifying with To-do API.""" client = await hass_ws_client(hass) @@ -93,16 +99,16 @@ async def test_get_items( async def test_add_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test adding shopping_list item and listing it.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -120,14 +126,14 @@ async def test_add_item( async def test_remove_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test removing a todo item.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) items = await ws_get_items() @@ -142,11 +148,11 @@ async def test_remove_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + 
TodoServices.REMOVE_ITEM, { - "item": [items[0]["uid"]], + ATTR_ITEM: [items[0]["uid"]], }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -161,18 +167,18 @@ async def test_remove_item( async def test_bulk_remove( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test removing a todo item.""" for _i in range(5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -186,11 +192,11 @@ async def test_bulk_remove( await hass.services.async_call( TODO_DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": uids, + ATTR_ITEM: uids, }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -205,18 +211,18 @@ async def test_bulk_remove( async def test_update_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -234,12 +240,12 @@ async def test_update_item( # Mark item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "status": "completed", + ATTR_ITEM: "soda", + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -258,18 +264,18 @@ async def test_update_item( async def test_partial_update_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item with partial information.""" # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -287,12 +293,12 @@ async def test_partial_update_item( # Mark item completed without changing the summary await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "status": "completed", + ATTR_ITEM: item["uid"], + ATTR_STATUS: "completed", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -310,12 +316,12 @@ async def test_partial_update_item( # Change the summary without changing the status await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": item["uid"], - "rename": "other summary", + ATTR_ITEM: item["uid"], + ATTR_RENAME: "other summary", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -334,19 +340,19 @@ async def test_partial_update_item( async def test_update_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], + ws_get_items: WsGetItemsType, ) -> None: """Test updating a todo item that does not exist.""" with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( TODO_DOMAIN, - "update_item", + 
TodoServices.UPDATE_ITEM, { - "item": "invalid-uid", - "rename": "Example task", + ATTR_ITEM: "invalid-uid", + ATTR_RENAME: "Example task", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -380,8 +386,8 @@ async def test_update_invalid_item( async def test_move_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], - ws_move_item: Callable[[str, str | None], Awaitable[dict[str, Any]]], + ws_get_items: WsGetItemsType, + ws_move_item: WsMoveItemType, src_idx: int, dst_idx: int | None, expected_items: list[str], @@ -391,11 +397,11 @@ async def test_move_item( for i in range(1, 5): await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": f"item {i}", + ATTR_ITEM: f"item {i}", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -422,16 +428,16 @@ async def test_move_item( async def test_move_invalid_item( hass: HomeAssistant, sl_setup: None, - ws_get_items: Callable[[], Awaitable[dict[str, str]]], - ws_move_item: Callable[[str, int | None], Awaitable[dict[str, Any]]], + ws_get_items: WsGetItemsType, + ws_move_item: WsMoveItemType, ) -> None: """Test moving an item that does not exist.""" await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "soda"}, - target={"entity_id": TEST_ENTITY}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "soda"}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -456,11 +462,11 @@ async def test_subscribe_item( # Create new item await hass.services.async_call( TODO_DOMAIN, - "add_item", + TodoServices.ADD_ITEM, { - "item": "soda", + ATTR_ITEM: "soda", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) @@ -491,12 +497,12 @@ async def test_subscribe_item( # Rename item item completed await hass.services.async_call( TODO_DOMAIN, - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "soda", - "rename": "milk", + ATTR_ITEM: "soda", + ATTR_RENAME: "milk", }, - target={"entity_id": TEST_ENTITY}, + target={ATTR_ENTITY_ID: TEST_ENTITY}, blocking=True, ) diff --git a/tests/components/simplefin/__init__.py b/tests/components/simplefin/__init__.py new file mode 100644 index 00000000000..e4c7848ba9a --- /dev/null +++ b/tests/components/simplefin/__init__.py @@ -0,0 +1,13 @@ +"""Tests for SimpleFin.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/simplefin/conftest.py b/tests/components/simplefin/conftest.py new file mode 100644 index 00000000000..328e16ccbd0 --- /dev/null +++ b/tests/components/simplefin/conftest.py @@ -0,0 +1,83 @@ +"""Test fixtures for SimpleFIN.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from simplefin4py import FinancialData +from simplefin4py.exceptions import SimpleFinInvalidClaimTokenError + +from homeassistant.components.simplefin import CONF_ACCESS_URL +from homeassistant.components.simplefin.const import DOMAIN + +from tests.common import MockConfigEntry, load_fixture + +MOCK_ACCESS_URL = "https://i:am@yomama.house.com" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + 
"""Mock setting up a config entry.""" + with patch( + "homeassistant.components.simplefin.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture +async def mock_config_entry() -> MockConfigEntry: + """Fixture for MockConfigEntry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_ACCESS_URL: MOCK_ACCESS_URL}, + version=1, + ) + + +@pytest.fixture +def mock_claim_setup_token() -> str: + """Fixture to mock the claim_setup_token method of SimpleFin.""" + with patch( + "homeassistant.components.simplefin.config_flow.SimpleFin.claim_setup_token", + ) as mock_claim_setup_token: + mock_claim_setup_token.return_value = "https://i:am@yomama.comma" + yield + + +@pytest.fixture +def mock_decode_claim_token_invalid_then_good() -> str: + """Fixture to mock the decode_claim_token method of SimpleFin.""" + return_values = [SimpleFinInvalidClaimTokenError, "valid_return_value"] + with patch( + "homeassistant.components.simplefin.config_flow.SimpleFin.decode_claim_token", + new_callable=lambda: MagicMock(side_effect=return_values), + ): + yield + + +@pytest.fixture +def mock_simplefin_client() -> Generator[AsyncMock]: + """Mock a SimpleFin client.""" + + with ( + patch( + "homeassistant.components.simplefin.SimpleFin", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.simplefin.config_flow.SimpleFin", + new=mock_client, + ), + ): + mock_client.claim_setup_token.return_value = MOCK_ACCESS_URL + client = mock_client.return_value + + fixture_data = load_fixture("fin_data.json", DOMAIN) + fin_data = FinancialData.from_json(fixture_data) + + assert fin_data.accounts != [] + client.fetch_data.return_value = fin_data + + client.access_url = MOCK_ACCESS_URL + + yield mock_client diff --git a/tests/components/simplefin/fixtures/fin_data.json b/tests/components/simplefin/fixtures/fin_data.json new file mode 100644 index 00000000000..bd35945c12b --- /dev/null +++ b/tests/components/simplefin/fixtures/fin_data.json @@ -0,0 +1,173 @@ +{ + "errors": [ + "Connection to Investments may need attention", + "Connection to The Bank of Go may need attention" + ], + "accounts": [ + { + "org": { + "domain": "www.newwealthfront.com", + "name": "The Bank of Go", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newwealthfront.com" + }, + "id": "ACT-1a2b3c4d-5e6f-7g8h-9i0j", + "name": "The Bank", + "currency": "USD", + "balance": "7777.77", + "available-balance": "7777.77", + "balance-date": 1705413843, + "transactions": [ + { + "id": "12394832938403", + "posted": 793090572, + "amount": "-1234.56", + "description": "Enchanted Bait Shop", + "payee": "Uncle Frank", + "memo": "Some memo", + "transacted_at": 793080572 + } + ], + "extra": { + "account-open-date": 978360153 + }, + "holdings": [] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newfidelity.com" + }, + "id": "ACT-1k2l3m4n-5o6p-7q8r-9s0t", + "name": "My Checking", + "currency": "USD", + "balance": "12345.67", + "available-balance": "5432.10", + "balance-date": 1705413319, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.newhfcu.org", + "name": "The Bank of Go", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newhfcu.org/" + }, + "id": "ACT-2a3b4c5d-6e7f-8g9h-0i1j", + "name": "PRIME SAVINGS", + "currency": "EUR", + "balance": "9876.54", + "available-balance": "8765.43", + "balance-date": 
1705428861, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Random Bank", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-3a4b5c6d-7e8f-9g0h-1i2j", + "name": "Costco Anywhere Visa® Card", + "currency": "USD", + "balance": "-532.69", + "available-balance": "4321.98", + "balance-date": 1705429002, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newfidelity.com" + }, + "id": "ACT-4k5l6m7n-8o9p-1q2r-3s4t", + "name": "Dr Evil", + "currency": "USD", + "balance": "1000000.00", + "available-balance": "13579.24", + "balance-date": 1705413319, + "transactions": [], + "holdings": [ + { + "id": "HOL-62eb5bb6-4aed-4fe1-bdbe-f28e127e359b", + "created": 1705413320, + "currency": "", + "cost_basis": "10000.00", + "description": "Fantastic FID GROWTH CO K6", + "market_value": "15000.00", + "purchase_price": "0.00", + "shares": "200.00", + "symbol": "FGKFX" + } + ] + }, + { + "org": { + "domain": "www.newfidelity.com", + "name": "Investments", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.newfidelity.com" + }, + "id": "ACT-5k6l7m8n-9o0p-1q2r-3s4t", + "name": "NerdCorp Series B", + "currency": "EUR", + "balance": "13579.24", + "available-balance": "9876.54", + "balance-date": 1705413319, + "transactions": [], + "holdings": [ + { + "id": "HOL-08f775cd-eedf-4ee5-9f53-241c8efa5bf3", + "created": 1705413321, + "currency": "", + "cost_basis": "7500.00", + "description": "Mythical FID GROWTH CO K6", + "market_value": "9876.54", + "purchase_price": "0.00", + "shares": "150.00", + "symbol": "FGKFX" + } + ] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Mythical RandomSavings", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-6a7b8c9d-0e1f-2g3h-4i5j", + "name": "Unicorn Pot", + "currency": "USD", + "balance": "10000.00", + "available-balance": "7500.00", + "balance-date": 1705429002, + "transactions": [], + "holdings": [] + }, + { + "org": { + "domain": "www.randombank2.com", + "name": "Mythical RandomSavings", + "sfin-url": "https://beta-bridge.simplefin.org/simplefin", + "url": "https://www.randombank2.com/" + }, + "id": "ACT-7a8b9c0d-1e2f-3g4h-5i6j", + "name": "Castle Mortgage", + "currency": "USD", + "balance": "7500.00", + "available-balance": "5000.00", + "balance-date": 1705429002, + "transactions": [], + "holdings": [] + } + ] +} diff --git a/tests/components/simplefin/snapshots/test_sensor.ambr b/tests/components/simplefin/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c7dced9300e --- /dev/null +++ b/tests/components/simplefin/snapshots/test_sensor.ambr @@ -0,0 +1,809 @@ +# serializer version: 1 +# name: test_all_entities[sensor.investments_dr_evil_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_dr_evil_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', 
+ 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments Dr Evil Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.investments_dr_evil_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1000000.00', + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_dr_evil_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments Dr Evil Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_dr_evil_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_my_checking_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments My Checking Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.investments_my_checking_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12345.67', + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments My Checking Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_balance', + 'unit_of_measurement': 'EUR', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Investments NerdCorp Series B Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'EUR', + }), + 'context': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '13579.24', + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments NerdCorp Series B Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7500.00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 
'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10000.00', + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-532.69', + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Data age', + }), + 'context': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_balance', + 'unit_of_measurement': 'EUR', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'EUR', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9876.54', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go 
PRIME SAVINGS Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:14:21+00:00', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': , + 'original_name': 'Balance', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balance', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_balance', + 'unit_of_measurement': 'USD', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'monetary', + 'friendly_name': 'The Bank of Go The Bank Balance', + 'icon': , + 'state_class': , + 'unit_of_measurement': 'USD', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_balance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7777.77', + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go The Bank Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T14:04:03+00:00', + }) +# --- diff --git a/tests/components/simplefin/test_config_flow.py b/tests/components/simplefin/test_config_flow.py new file mode 100644 index 00000000000..c83f2aed62e --- /dev/null +++ b/tests/components/simplefin/test_config_flow.py @@ -0,0 +1,164 @@ +"""Test config flow.""" + +from unittest.mock import AsyncMock + +import pytest +from simplefin4py.exceptions import ( + SimpleFinAuthError, + SimpleFinClaimError, + SimpleFinInvalidAccountURLError, + SimpleFinInvalidClaimTokenError, + SimpleFinPaymentRequiredError, +) + +from homeassistant.components.simplefin import CONF_ACCESS_URL +from homeassistant.components.simplefin.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from 
homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import entity_registry as er + +from .conftest import MOCK_ACCESS_URL + +from tests.common import MockConfigEntry + + +async def test_successful_claim( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_simplefin_client: AsyncMock, +) -> None: + """Test successful token claim in config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "SimpleFIN" + assert result["data"] == {CONF_ACCESS_URL: MOCK_ACCESS_URL} + + +async def test_already_setup( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test we abort when the access URL is already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: MOCK_ACCESS_URL}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_access_url( + hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test standard config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "http://user:password@string"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_ACCESS_URL] == "http://user:password@string" + assert result["title"] == "SimpleFIN" + + +@pytest.mark.parametrize( + ("side_effect", "error_key"), + [ + (SimpleFinInvalidAccountURLError, "url_error"), + (SimpleFinPaymentRequiredError, "payment_required"), + (SimpleFinAuthError, "invalid_auth"), + ], +) +async def test_access_url_errors( + hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + side_effect: Exception, + error_key: str, +) -> None: + """Test the various errors we can get in access_url mode.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + mock_simplefin_client.claim_setup_token.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + + mock_simplefin_client.claim_setup_token.side_effect = None + + # Pass the entry creation + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "http://user:password@string"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_ACCESS_URL: "http://user:password@string"} + assert result["title"] == "SimpleFIN" + + +@pytest.mark.parametrize( + ("side_effect", "error_key"), + [ + (SimpleFinInvalidClaimTokenError, "invalid_claim_token"), + (SimpleFinClaimError, "claim_error"), + ], +) +async def test_claim_token_errors( +
hass: HomeAssistant, + mock_simplefin_client: AsyncMock, + side_effect: Exception, + error_key: str, +) -> None: + """Test config flow with various token claim errors.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + mock_simplefin_client.claim_setup_token.side_effect = side_effect + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_key} + + mock_simplefin_client.claim_setup_token.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ACCESS_URL: "donJulio"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == {CONF_ACCESS_URL: "https://i:am@yomama.house.com"} + assert result["title"] == "SimpleFIN" diff --git a/tests/components/simplefin/test_sensor.py b/tests/components/simplefin/test_sensor.py new file mode 100644 index 00000000000..495f249d4e1 --- /dev/null +++ b/tests/components/simplefin/test_sensor.py @@ -0,0 +1,94 @@ +"""Test SimpleFin Sensor with Snapshot data.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from simplefin4py.exceptions import SimpleFinAuthError, SimpleFinPaymentRequiredError +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.simplefin.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("side_effect"), + [ + (SimpleFinAuthError), + (SimpleFinPaymentRequiredError), + ], +) +async def test_update_errors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, + freezer: FrozenDateTimeFactory, + side_effect: Exception, +) -> None: + """Test connection error.""" + await setup_integration(hass, mock_config_entry) + + assert hass.states.get("sensor.the_bank_of_go_the_bank_balance").state == "7777.77" + assert hass.states.get("sensor.investments_my_checking_balance").state == "12345.67" + assert ( + hass.states.get("sensor.the_bank_of_go_prime_savings_balance").state + == "9876.54" + ) + assert ( + hass.states.get("sensor.random_bank_costco_anywhere_visa_r_card_balance").state + == "-532.69" + ) + assert hass.states.get("sensor.investments_dr_evil_balance").state == "1000000.00" + assert ( + hass.states.get("sensor.investments_nerdcorp_series_b_balance").state + == "13579.24" + ) + assert ( + hass.states.get("sensor.mythical_randomsavings_unicorn_pot_balance").state + == "10000.00" + ) + assert ( + hass.states.get("sensor.mythical_randomsavings_castle_mortgage_balance").state + == "7500.00" + ) + + 
mock_simplefin_client.return_value.fetch_data.side_effect = side_effect + freezer.tick(timedelta(days=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + sensors = [ + "sensor.the_bank_of_go_the_bank_balance", + "sensor.investments_my_checking_balance", + "sensor.the_bank_of_go_prime_savings_balance", + "sensor.random_bank_costco_anywhere_visa_r_card_balance", + "sensor.investments_dr_evil_balance", + "sensor.investments_nerdcorp_series_b_balance", + "sensor.mythical_randomsavings_unicorn_pot_balance", + "sensor.mythical_randomsavings_castle_mortgage_balance", + ] + + for sensor in sensors: + assert hass.states.get(sensor).state == STATE_UNAVAILABLE diff --git a/tests/components/simplisafe/conftest.py b/tests/components/simplisafe/conftest.py index aaf853863e5..12ed845c7d2 100644 --- a/tests/components/simplisafe/conftest.py +++ b/tests/components/simplisafe/conftest.py @@ -1,10 +1,10 @@ """Define test fixtures for SimpliSafe.""" +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, Mock, patch import pytest from simplipy.system.v3 import SystemV3 -from typing_extensions import AsyncGenerator from homeassistant.components.simplisafe.const import DOMAIN from homeassistant.const import CONF_CODE, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME diff --git a/tests/components/simplisafe/test_diagnostics.py b/tests/components/simplisafe/test_diagnostics.py index 6948f98b159..31bd44c6146 100644 --- a/tests/components/simplisafe/test_diagnostics.py +++ b/tests/components/simplisafe/test_diagnostics.py @@ -3,6 +3,7 @@ from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -28,6 +29,8 @@ async def test_entry_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, "subscription_data": { "12345": { diff --git a/tests/components/simulated/test_sensor.py b/tests/components/simulated/test_sensor.py index d32eca8c66e..b167147367a 100644 --- a/tests/components/simulated/test_sensor.py +++ b/tests/components/simulated/test_sensor.py @@ -16,13 +16,17 @@ from homeassistant.components.simulated.sensor import ( DEFAULT_PHASE, DEFAULT_RELATIVE_TO_EPOCH, DEFAULT_SEED, + DOMAIN, ) from homeassistant.const import CONF_FRIENDLY_NAME from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component -async def test_simulated_sensor_default_config(hass: HomeAssistant) -> None: +async def test_simulated_sensor_default_config( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test default config.""" config = {"sensor": {"platform": "simulated"}} assert await async_setup_component(hass, "sensor", config) @@ -40,3 +44,7 @@ async def test_simulated_sensor_default_config(hass: HomeAssistant) -> None: assert state.attributes.get(CONF_FWHM) == DEFAULT_FWHM assert state.attributes.get(CONF_SEED) == DEFAULT_SEED assert state.attributes.get(CONF_RELATIVE_TO_EPOCH) == DEFAULT_RELATIVE_TO_EPOCH + + issue = issue_registry.async_get_issue(DOMAIN, DOMAIN) + assert issue.issue_id == DOMAIN + assert issue.translation_key == "simulated_deprecation" diff --git a/tests/components/sleepiq/conftest.py b/tests/components/sleepiq/conftest.py index fd07cc414e7..a9456bd3cc6 100644 --- a/tests/components/sleepiq/conftest.py +++ 
b/tests/components/sleepiq/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch from asyncsleepiq import ( @@ -17,7 +18,6 @@ from asyncsleepiq import ( SleepIQSleeper, ) import pytest -from typing_extensions import Generator from homeassistant.components.sleepiq import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME diff --git a/tests/components/slimproto/conftest.py b/tests/components/slimproto/conftest.py index ece30d3e5cf..1bb2d7f2628 100644 --- a/tests/components/slimproto/conftest.py +++ b/tests/components/slimproto/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.slimproto.const import DOMAIN diff --git a/tests/components/smartthings/test_fan.py b/tests/components/smartthings/test_fan.py index 043c022b225..b78c453b402 100644 --- a/tests/components/smartthings/test_fan.py +++ b/tests/components/smartthings/test_fan.py @@ -39,7 +39,12 @@ async def test_entity_state(hass: HomeAssistant, device_factory) -> None: # Dimmer 1 state = hass.states.get("fan.fan_1") assert state.state == "on" - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -100,7 +105,12 @@ async def test_setup_mode_capability(hass: HomeAssistant, device_factory) -> Non # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.PRESET_MODE + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PRESET_MODE] == "high" assert state.attributes[ATTR_PRESET_MODES] == ["high", "low", "medium"] @@ -122,7 +132,12 @@ async def test_setup_speed_capability(hass: HomeAssistant, device_factory) -> No # Assert state = hass.states.get("fan.fan_1") assert state is not None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == FanEntityFeature.SET_SPEED + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == FanEntityFeature.SET_SPEED + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) assert state.attributes[ATTR_PERCENTAGE] == 66 @@ -151,7 +166,10 @@ async def test_setup_both_capabilities(hass: HomeAssistant, device_factory) -> N assert state is not None assert ( state.attributes[ATTR_SUPPORTED_FEATURES] - == FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + == FanEntityFeature.SET_SPEED + | FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON ) assert state.attributes[ATTR_PERCENTAGE] == 66 assert state.attributes[ATTR_PRESET_MODE] == "high" diff --git a/tests/components/snapcast/conftest.py b/tests/components/snapcast/conftest.py index e5806ac5f40..bcc0ac5bc30 100644 --- a/tests/components/snapcast/conftest.py +++ b/tests/components/snapcast/conftest.py @@ -1,9 +1,9 @@ """Test the snapcast config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/snips/test_init.py 
b/tests/components/snips/test_init.py index 89ee211b38f..82dbf1cd281 100644 --- a/tests/components/snips/test_init.py +++ b/tests/components/snips/test_init.py @@ -6,10 +6,10 @@ import logging import pytest import voluptuous as vol -from homeassistant.bootstrap import async_setup_component from homeassistant.components import snips from homeassistant.core import HomeAssistant from homeassistant.helpers.intent import ServiceIntentHandler, async_register +from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message, async_mock_intent, async_mock_service from tests.typing import MqttMockHAClient diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 08340487d99..c34d0c011a3 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -7,16 +7,10 @@ import pytest from homeassistant.components.solarlog.const import DOMAIN as SOLARLOG_DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import HomeAssistant from .const import HOST, NAME -from tests.common import ( - MockConfigEntry, - load_json_object_fixture, - mock_device_registry, - mock_registry, -) +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture @@ -60,7 +54,7 @@ def mock_solarlog_connector(): @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.solarlog.async_setup_entry", return_value=True @@ -76,15 +70,3 @@ def mock_test_connection(): return_value=True, ): yield - - -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass: HomeAssistant): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass: HomeAssistant): - """Return an empty, loaded, registry.""" - return mock_registry(hass) diff --git a/tests/components/solarlog/test_init.py b/tests/components/solarlog/test_init.py index f9f00ef601b..0044d09f20e 100644 --- a/tests/components/solarlog/test_init.py +++ b/tests/components/solarlog/test_init.py @@ -50,7 +50,9 @@ async def test_raise_config_entry_not_ready_when_offline( async def test_migrate_config_entry( - hass: HomeAssistant, device_reg: DeviceRegistry, entity_reg: EntityRegistry + hass: HomeAssistant, + device_registry: DeviceRegistry, + entity_registry: EntityRegistry, ) -> None: """Test successful migration of entry data.""" entry = MockConfigEntry( @@ -64,13 +66,13 @@ async def test_migrate_config_entry( ) entry.add_to_hass(hass) - device = device_reg.async_get_or_create( + device = device_registry.async_get_or_create( config_entry_id=entry.entry_id, identifiers={(DOMAIN, entry.entry_id)}, manufacturer="Solar-Log", name="solarlog", ) - sensor_entity = entity_reg.async_get_or_create( + sensor_entity = entity_registry.async_get_or_create( config_entry=entry, platform=DOMAIN, domain=Platform.SENSOR, @@ -85,7 +87,7 @@ async def test_migrate_config_entry( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - entity_migrated = entity_reg.async_get(sensor_entity.entity_id) + entity_migrated = entity_registry.async_get(sensor_entity.entity_id) assert entity_migrated assert entity_migrated.unique_id == f"{entry.entry_id}_last_updated" diff --git a/tests/components/sonarr/conftest.py b/tests/components/sonarr/conftest.py index 739880a99aa..de7a3f781d7 100644 --- 
a/tests/components/sonarr/conftest.py +++ b/tests/components/sonarr/conftest.py @@ -1,5 +1,6 @@ """Fixtures for Sonarr integration tests.""" +from collections.abc import Generator import json from unittest.mock import MagicMock, patch @@ -13,7 +14,6 @@ from aiopyarr import ( SystemStatus, ) import pytest -from typing_extensions import Generator from homeassistant.components.sonarr.const import ( CONF_BASE_PATH, diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 51dd2b9047c..bbec7a2308c 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,7 +1,7 @@ """Configuration for Sonos tests.""" import asyncio -from collections.abc import Callable +from collections.abc import Callable, Generator from copy import copy from ipaddress import ip_address from unittest.mock import AsyncMock, MagicMock, Mock, patch @@ -17,6 +17,7 @@ from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.sonos import DOMAIN from homeassistant.const import CONF_HOSTS from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture, load_json_value_fixture @@ -271,7 +272,7 @@ def soco_fixture(soco_factory): @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -453,6 +454,7 @@ def mock_get_music_library_information( "object.container.album.musicAlbum", ) ] + return [] @pytest.fixture(name="music_library_browse_categories") @@ -660,3 +662,26 @@ def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): await hass.async_block_till_done(wait_background_tasks=True) return _wrapper + + +@pytest.fixture(name="sonos_setup_two_speakers") +async def sonos_setup_two_speakers( + hass: HomeAssistant, soco_factory: SoCoMockFactory +) -> list[MockSoCo]: + """Set up home assistant with two Sonos Speakers.""" + soco_lr = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") + soco_br = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") + await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["10.10.10.1", "10.10.10.2"], + } + } + }, + ) + await hass.async_block_till_done() + return [soco_lr, soco_br] diff --git a/tests/components/sonos/fixtures/av_transport.json b/tests/components/sonos/fixtures/av_transport.json new file mode 100644 index 00000000000..743ac61e3ff --- /dev/null +++ b/tests/components/sonos/fixtures/av_transport.json @@ -0,0 +1,38 @@ +{ + "transport_state": "PLAYING", + "current_play_mode": "NORMAL", + "current_crossfade_mode": "0", + "number_of_tracks": "1", + "current_track": "1", + "current_section": "0", + "current_track_uri": "x-rincon:RINCON_test_10.10.10.2", + "current_track_duration": "", + "current_track_meta_data": "", + "next_track_uri": "", + "next_track_meta_data": "", + "enqueued_transport_uri": "", + "enqueued_transport_uri_meta_data": "", + "playback_storage_medium": "NETWORK", + "av_transport_uri": "x-rincon:RINCON_test_10.10.10.2", + "av_transport_uri_meta_data": "", + "next_av_transport_uri": "", + "next_av_transport_uri_meta_data": "", + "current_transport_actions": "Stop, Play", + "current_valid_play_modes": "CROSSFADE", + "direct_control_client_id": "", + 
"direct_control_is_suspended": "0", + "direct_control_account_id": "", + "transport_status": "OK", + "sleep_timer_generation": "0", + "alarm_running": "0", + "snooze_running": "0", + "restart_pending": "0", + "transport_play_speed": "NOT_IMPLEMENTED", + "current_media_duration": "NOT_IMPLEMENTED", + "record_storage_medium": "NOT_IMPLEMENTED", + "possible_playback_storage_media": "NONE, NETWORK", + "possible_record_storage_media": "NOT_IMPLEMENTED", + "record_medium_write_status": "NOT_IMPLEMENTED", + "current_record_quality_mode": "NOT_IMPLEMENTED", + "possible_record_quality_modes": "NOT_IMPLEMENTED" +} diff --git a/tests/components/sonos/fixtures/zgs_group.xml b/tests/components/sonos/fixtures/zgs_group.xml new file mode 100644 index 00000000000..58f40be0049 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_group.xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/tests/components/sonos/fixtures/zgs_two_single.xml b/tests/components/sonos/fixtures/zgs_two_single.xml new file mode 100644 index 00000000000..18c3c9231c6 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_two_single.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index ab9b598bb04..c765ed82ac6 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -2,6 +2,7 @@ import logging from typing import Any +from unittest.mock import patch import pytest @@ -12,9 +13,23 @@ from homeassistant.components.media_player import ( SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, ) -from homeassistant.components.sonos.const import SOURCE_LINEIN, SOURCE_TV -from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT -from homeassistant.const import STATE_IDLE +from homeassistant.components.sonos.const import ( + DOMAIN as SONOS_DOMAIN, + SOURCE_LINEIN, + SOURCE_TV, +) +from homeassistant.components.sonos.media_player import ( + LONG_SERVICE_TIMEOUT, + SERVICE_RESTORE, + SERVICE_SNAPSHOT, + VOLUME_INCREMENT, +) +from homeassistant.const import ( + SERVICE_VOLUME_DOWN, + SERVICE_VOLUME_SET, + SERVICE_VOLUME_UP, + STATE_IDLE, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.device_registry import ( @@ -22,8 +37,9 @@ from homeassistant.helpers.device_registry import ( CONNECTION_UPNP, DeviceRegistry, ) +from homeassistant.setup import async_setup_component -from .conftest import MockMusicServiceItem, SoCoMockFactory +from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory async def test_device_registry( @@ -707,3 +723,92 @@ async def test_play_media_favorite_item_id( blocking=True, ) assert "UNKNOWN_ID" in str(sve.value) + + +async def _setup_hass(hass: HomeAssistant): + await async_setup_component( + hass, + SONOS_DOMAIN, + { + "sonos": { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["10.10.10.1", "10.10.10.2"], + } + } + }, + ) + await hass.async_block_till_done() + + +async def test_service_snapshot_restore( + hass: HomeAssistant, + soco_factory: SoCoMockFactory, +) -> None: + """Test the snapshot and restore services.""" + soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") + soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") + await _setup_hass(hass) + with patch( + "homeassistant.components.sonos.speaker.Snapshot.snapshot" + ) as mock_snapshot: + await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_SNAPSHOT, + { + "entity_id": 
["media_player.living_room", "media_player.bedroom"], + }, + blocking=True, + ) + assert mock_snapshot.call_count == 2 + + with patch( + "homeassistant.components.sonos.speaker.Snapshot.restore" + ) as mock_restore: + await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_RESTORE, + { + "entity_id": ["media_player.living_room", "media_player.bedroom"], + }, + blocking=True, + ) + assert mock_restore.call_count == 2 + + +async def test_volume( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the media player volume services.""" + initial_volume = soco.volume + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_UP, + { + "entity_id": "media_player.zone_a", + }, + blocking=True, + ) + assert soco.volume == initial_volume + VOLUME_INCREMENT + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_DOWN, + { + "entity_id": "media_player.zone_a", + }, + blocking=True, + ) + assert soco.volume == initial_volume + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_VOLUME_SET, + {"entity_id": "media_player.zone_a", "volume_level": 0.30}, + blocking=True, + ) + # SoCo uses 0..100 for its range. + assert soco.volume == 30 diff --git a/tests/components/sonos/test_speaker.py b/tests/components/sonos/test_speaker.py index 2c4357060be..40d126c64f2 100644 --- a/tests/components/sonos/test_speaker.py +++ b/tests/components/sonos/test_speaker.py @@ -4,11 +4,18 @@ from unittest.mock import patch import pytest +from homeassistant.components.media_player import ( + DOMAIN as MP_DOMAIN, + SERVICE_MEDIA_PLAY, +) +from homeassistant.components.sonos import DOMAIN from homeassistant.components.sonos.const import DATA_SONOS, SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import MockSoCo, SonosMockEvent + +from tests.common import async_fire_time_changed, load_fixture, load_json_value_fixture async def test_fallback_to_polling( @@ -67,3 +74,140 @@ async def test_subscription_creation_fails( await hass.async_block_till_done() assert speaker._subscriptions + + +def _create_zgs_sonos_event( + fixture_file: str, soco_1: MockSoCo, soco_2: MockSoCo, create_uui_ds: bool = True +) -> SonosMockEvent: + """Create a Sonos Event for zone group state, with the option of creating the uui_ds_in_group.""" + zgs = load_fixture(fixture_file, DOMAIN) + variables = {} + variables["ZoneGroupState"] = zgs + # Sonos does not always send this variable with zgs events + if create_uui_ds: + variables["zone_player_uui_ds_in_group"] = f"{soco_1.uid},{soco_2.uid}" + event = SonosMockEvent(soco_1, soco_1.zoneGroupTopology, variables) + if create_uui_ds: + event.zone_player_uui_ds_in_group = f"{soco_1.uid},{soco_2.uid}" + return event + + +def _create_avtransport_sonos_event( + fixture_file: str, soco: MockSoCo +) -> SonosMockEvent: + """Create a Sonos Event for an AVTransport update.""" + variables = load_json_value_fixture(fixture_file, DOMAIN) + return SonosMockEvent(soco, soco.avTransport, variables) + + +async def _media_play(hass: HomeAssistant, entity: str) -> None: + """Call media play service.""" + await hass.services.async_call( + MP_DOMAIN, + SERVICE_MEDIA_PLAY, + { + "entity_id": entity, + }, + blocking=True, + ) + + +async def test_zgs_event_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Tests grouping and ungrouping two speakers.""" + # When Sonos speakers are grouped; one of the speakers is the 
coordinator and is in charge + # of playback across both speakers. Hence, service calls to play or pause on media_players + # that are part of the group are routed to the coordinator. + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Initial state - speakers are not grouped + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Each speaker is its own coordinator and calls should route to their SoCos + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 2 - Group the speakers, living room is the coordinator + event = _create_zgs_sonos_event( + "zgs_group.xml", soco_lr, soco_br, create_uui_ds=True + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + # Play calls should route to the living room SoCo + await _media_play(hass, "media_player.living_room") + await _media_play(hass, "media_player.bedroom") + assert soco_lr.play.call_count == 2 + assert soco_br.play.call_count == 0 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 3 - Ungroup the speakers + event = _create_zgs_sonos_event( + "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Calls should route to each speaker's SoCo + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + +async def test_zgs_avtransport_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Test processing avtransport and zgs events to change group membership.""" + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Send a transport event changing the coordinator + # for the living room speaker to the bedroom speaker.
+ event = _create_avtransport_sonos_event("av_transport.json", soco_lr) + soco_lr.avTransport.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + # Call should route to the new coordinator which is the bedroom + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 0 + assert soco_br.play.call_count == 1 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 2 - Send a zgs event to return living room to its own coordinator + event = _create_zgs_sonos_event( + "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + # Call should route to the living room + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + assert soco_br.play.call_count == 0 diff --git a/tests/components/spc/test_init.py b/tests/components/spc/test_init.py index 3dfea94a4bd..4f335e2f980 100644 --- a/tests/components/spc/test_init.py +++ b/tests/components/spc/test_init.py @@ -5,13 +5,13 @@ from unittest.mock import Mock, PropertyMock, patch import pyspcwebgw from pyspcwebgw.const import AreaMode -from homeassistant.bootstrap import async_setup_component from homeassistant.components.spc import DATA_API from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component -async def test_valid_device_config(hass: HomeAssistant, monkeypatch) -> None: +async def test_valid_device_config(hass: HomeAssistant) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} @@ -22,7 +22,7 @@ async def test_valid_device_config(hass: HomeAssistant, monkeypatch) -> None: assert await async_setup_component(hass, "spc", config) is True -async def test_invalid_device_config(hass: HomeAssistant, monkeypatch) -> None: +async def test_invalid_device_config(hass: HomeAssistant) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/"}} diff --git a/tests/components/srp_energy/conftest.py b/tests/components/srp_energy/conftest.py index 45eb726443f..b612bc9f3f3 100644 --- a/tests/components/srp_energy/conftest.py +++ b/tests/components/srp_energy/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator import datetime as dt from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator from homeassistant.components.srp_energy.const import DOMAIN, PHOENIX_TIME_ZONE from homeassistant.const import CONF_ID diff --git a/tests/components/srp_energy/test_config_flow.py b/tests/components/srp_energy/test_config_flow.py index 19e21f0e1a0..e3abb3c98df 100644 --- a/tests/components/srp_energy/test_config_flow.py +++ b/tests/components/srp_energy/test_config_flow.py @@ -2,6 +2,8 @@ from unittest.mock import MagicMock, patch +import pytest + from homeassistant.components.srp_energy.const import CONF_IS_TOU, DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_SOURCE, CONF_USERNAME @@ -23,8 +25,9 @@ from . 
import ( from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_srp_energy_config_flow") async def test_show_form( - hass: HomeAssistant, mock_srp_energy_config_flow: MagicMock, capsys + hass: HomeAssistant, capsys: pytest.CaptureFixture[str] ) -> None: """Test show configuration form.""" result = await hass.config_entries.flow.async_init( @@ -140,7 +143,7 @@ async def test_flow_entry_already_configured( async def test_flow_multiple_configs( - hass: HomeAssistant, init_integration: MockConfigEntry, capsys + hass: HomeAssistant, init_integration: MockConfigEntry ) -> None: """Test multiple config entries.""" # Verify mock config setup from fixture diff --git a/tests/components/srp_energy/test_sensor.py b/tests/components/srp_energy/test_sensor.py index 7369d07f77a..025d9fe49ca 100644 --- a/tests/components/srp_energy/test_sensor.py +++ b/tests/components/srp_energy/test_sensor.py @@ -1,6 +1,5 @@ """Tests for the srp_energy sensor platform.""" -import time from unittest.mock import patch from requests.models import HTTPError @@ -80,7 +79,7 @@ async def test_srp_entity_timeout( ): client = srp_energy_mock.return_value client.validate.return_value = True - client.usage = lambda _, __, ___: time.sleep(1) + client.usage = lambda _, __, ___: None mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) diff --git a/tests/components/startca/test_sensor.py b/tests/components/startca/test_sensor.py index b0d43af1cae..be5524eb650 100644 --- a/tests/components/startca/test_sensor.py +++ b/tests/components/startca/test_sensor.py @@ -2,11 +2,11 @@ from http import HTTPStatus -from homeassistant.bootstrap import async_setup_component from homeassistant.components.startca.sensor import StartcaData from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, UnitOfInformation from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.setup import async_setup_component from tests.test_util.aiohttp import AiohttpClientMocker diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 3cf3de54940..0142d71a805 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -13,13 +13,13 @@ so that it can inspect the output. 
from __future__ import annotations import asyncio +from collections.abc import Generator import logging import threading from unittest.mock import Mock, patch from aiohttp import web import pytest -from typing_extensions import Generator from homeassistant.components.stream.core import StreamOutput from homeassistant.components.stream.worker import StreamState diff --git a/tests/components/stream/test_recorder.py b/tests/components/stream/test_recorder.py index 515f3fff82d..8e079cded45 100644 --- a/tests/components/stream/test_recorder.py +++ b/tests/components/stream/test_recorder.py @@ -35,7 +35,7 @@ from tests.common import async_fire_time_changed @pytest.fixture(autouse=True) -async def stream_component(hass): +async def stream_component(hass: HomeAssistant) -> None: """Set up the component before each test.""" await async_setup_component(hass, "stream", {"stream": {}}) @@ -305,7 +305,5 @@ async def test_record_stream_rotate(hass: HomeAssistant, filename, h264_video) - # Assert assert os.path.exists(filename) - with open(filename, "rb") as rotated_mp4: - assert_mp4_has_transform_matrix( - rotated_mp4.read(), stream.dynamic_stream_settings.orientation - ) + data = await hass.async_add_executor_job(Path(filename).read_bytes) + assert_mp4_has_transform_matrix(data, stream.dynamic_stream_settings.orientation) diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 7226adc7d7e..a96866eac4b 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -83,7 +83,7 @@ def filename(tmp_path: Path) -> str: @pytest.fixture(autouse=True) -def mock_stream_settings(hass): +def mock_stream_settings(hass: HomeAssistant) -> None: """Set the stream settings data in hass before each test.""" hass.data[DOMAIN] = { ATTR_SETTINGS: StreamSettings( diff --git a/tests/components/streamlabswater/conftest.py b/tests/components/streamlabswater/conftest.py index 5a53c7204fa..1bbdd3e9a08 100644 --- a/tests/components/streamlabswater/conftest.py +++ b/tests/components/streamlabswater/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the StreamLabs tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from streamlabswater.streamlabswater import StreamlabsClient -from typing_extensions import Generator from homeassistant.components.streamlabswater import DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index d28d9c308a7..ca2685ff827 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,12 +1,11 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from http import HTTPStatus from pathlib import Path from unittest.mock import AsyncMock import pytest -from typing_extensions import Generator from homeassistant.components.stt import ( DOMAIN, diff --git a/tests/components/suez_water/conftest.py b/tests/components/suez_water/conftest.py index 51ade6009dc..f218fb7d833 100644 --- a/tests/components/suez_water/conftest.py +++ b/tests/components/suez_water/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Suez Water tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/sun/test_trigger.py b/tests/components/sun/test_trigger.py index 
fc1af35faea..303ca3b80cd 100644 --- a/tests/components/sun/test_trigger.py +++ b/tests/components/sun/test_trigger.py @@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, async_mock_service, mock_component +from tests.common import async_fire_time_changed, mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -26,14 +26,8 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -41,7 +35,9 @@ def setup_comp(hass): ) -async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_sunset_trigger( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the sunset trigger.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 2, tzinfo=dt_util.UTC) @@ -67,10 +63,11 @@ async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 1 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 1 with freeze_time(now): await hass.services.async_call( @@ -79,14 +76,17 @@ async def test_sunset_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[2].data["id"] == 0 -async def test_sunrise_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_sunrise_trigger( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test the sunrise trigger.""" now = datetime(2015, 9, 13, 23, tzinfo=dt_util.UTC) trigger_time = datetime(2015, 9, 16, 14, tzinfo=dt_util.UTC) @@ -105,11 +105,11 @@ async def test_sunrise_trigger(hass: HomeAssistant, calls: list[ServiceCall]) -> async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_sunset_trigger_with_offset( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the sunset trigger with offset.""" now = datetime(2015, 9, 15, 23, tzinfo=dt_util.UTC) @@ -142,12 +142,12 @@ async def test_sunset_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "sun - sunset - 0:30:00" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "sun - sunset - 0:30:00" async def test_sunrise_trigger_with_offset( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test the sunrise trigger with offset.""" now = datetime(2015, 9, 13, 23, 
tzinfo=dt_util.UTC) @@ -171,4 +171,4 @@ async def test_sunrise_trigger_with_offset( async_fire_time_changed(hass, trigger_time) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/surepetcare/conftest.py b/tests/components/surepetcare/conftest.py index 9ae1bfe310a..5dcc5dfdadc 100644 --- a/tests/components/surepetcare/conftest.py +++ b/tests/components/surepetcare/conftest.py @@ -17,6 +17,7 @@ from tests.common import MockConfigEntry async def _mock_call(method, resource): if method == "GET" and resource == MESTART_RESOURCE: return {"data": MOCK_API_DATA} + return None @pytest.fixture diff --git a/tests/components/swiss_public_transport/conftest.py b/tests/components/swiss_public_transport/conftest.py index c139b99e54d..88bd233765b 100644 --- a/tests/components/swiss_public_transport/conftest.py +++ b/tests/components/swiss_public_transport/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the swiss_public_transport tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switch/test_device_action.py b/tests/components/switch/test_device_action.py index 0b41ce7992d..9751721cbc7 100644 --- a/tests/components/switch/test_device_action.py +++ b/tests/components/switch/test_device_action.py @@ -7,7 +7,7 @@ from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.switch import DOMAIN from homeassistant.const import EntityCategory -from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component @@ -24,12 +24,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_actions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -115,7 +109,6 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -190,7 +183,6 @@ async def test_action_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off actions.""" config_entry = MockConfigEntry(domain="test", data={}) diff --git a/tests/components/switch/test_device_condition.py b/tests/components/switch/test_device_condition.py index 2ba2c6adb5c..7c4f434b0a4 100644 --- a/tests/components/switch/test_device_condition.py +++ b/tests/components/switch/test_device_condition.py @@ -20,7 +20,6 @@ from tests.common import ( MockConfigEntry, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> 
list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -183,7 +176,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -249,20 +242,20 @@ async def test_if_state( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_off event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_off event - test_event2" @pytest.mark.usefixtures("enable_custom_integrations") @@ -270,7 +263,7 @@ async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -315,12 +308,12 @@ async def test_if_state_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_on event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_on event - test_event1" @pytest.mark.usefixtures("enable_custom_integrations") @@ -328,7 +321,7 @@ async def test_if_fires_on_for_condition( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for firing if condition is on with delay.""" point1 = dt_util.utcnow() @@ -377,26 +370,26 @@ async def test_if_fires_on_for_condition( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 10 secs into the future freezer.move_to(point2) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Time travel 20 secs into the future freezer.move_to(point3) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_off event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_off event - test_event1" diff --git a/tests/components/switch/test_device_trigger.py b/tests/components/switch/test_device_trigger.py 
index 092b7a964bb..08e6ab6d0f6 100644 --- a/tests/components/switch/test_device_trigger.py +++ b/tests/components/switch/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -181,7 +174,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -268,20 +261,20 @@ async def test_if_fires_on_state_change( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 2 - assert {calls[0].data["some"], calls[1].data["some"]} == { + assert len(service_calls) == 2 + assert {service_calls[0].data["some"], service_calls[1].data["some"]} == { f"turn_off device - {entry.entity_id} - on - off - None", f"turn_on_or_off device - {entry.entity_id} - on - off - None", } hass.states.async_set(entry.entity_id, STATE_ON) await hass.async_block_till_done() - assert len(calls) == 4 - assert {calls[2].data["some"], calls[3].data["some"]} == { + assert len(service_calls) == 4 + assert {service_calls[2].data["some"], service_calls[3].data["some"]} == { f"turn_on device - {entry.entity_id} - off - on - None", f"turn_on_or_off device - {entry.entity_id} - off - on - None", } @@ -292,7 +285,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -337,13 +330,13 @@ async def test_if_fires_on_state_change_legacy( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - None" ) @@ -353,7 +346,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -399,16 +392,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await 
hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - on - off - 0:00:05" ) diff --git a/tests/components/switch_as_x/conftest.py b/tests/components/switch_as_x/conftest.py index 88a86892d2d..f8328f38b54 100644 --- a/tests/components/switch_as_x/conftest.py +++ b/tests/components/switch_as_x/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/switch_as_x/test_fan.py b/tests/components/switch_as_x/test_fan.py index fd4296bd616..a33490dab45 100644 --- a/tests/components/switch_as_x/test_fan.py +++ b/tests/components/switch_as_x/test_fan.py @@ -44,7 +44,7 @@ async def test_default_state(hass: HomeAssistant) -> None: state = hass.states.get("fan.wind_machine") assert state is not None assert state.state == "unavailable" - assert state.attributes["supported_features"] == 0 + assert state.attributes["supported_features"] == 48 async def test_service_calls(hass: HomeAssistant) -> None: diff --git a/tests/components/switchbot_cloud/conftest.py b/tests/components/switchbot_cloud/conftest.py index ed233ff2de9..b559930dedb 100644 --- a/tests/components/switchbot_cloud/conftest.py +++ b/tests/components/switchbot_cloud/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the SwitchBot via API tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switcher_kis/conftest.py b/tests/components/switcher_kis/conftest.py index 8ff395fcab3..2cf123af2b0 100644 --- a/tests/components/switcher_kis/conftest.py +++ b/tests/components/switcher_kis/conftest.py @@ -1,9 +1,9 @@ """Common fixtures and objects for the Switcher integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/switcher_kis/test_button.py b/tests/components/switcher_kis/test_button.py index 264c163e111..d0604487370 100644 --- a/tests/components/switcher_kis/test_button.py +++ b/tests/components/switcher_kis/test_button.py @@ -63,7 +63,12 @@ async def test_assume_button( ) @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_swing_button( - hass: HomeAssistant, entity, swing, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, + entity, + swing, + mock_bridge, + mock_api, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test vertical swing on/off button.""" monkeypatch.setattr(DEVICE, "remote_id", "ELEC7022") @@ -88,7 +93,7 @@ async def test_swing_button( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_control_device_fail( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test control device fail.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_climate.py b/tests/components/switcher_kis/test_climate.py index 759f7f1bd98..5da9684bf2a 100644 --- a/tests/components/switcher_kis/test_climate.py +++ 
b/tests/components/switcher_kis/test_climate.py @@ -37,7 +37,7 @@ ENTITY_ID = f"{CLIMATE_DOMAIN}.{slugify(DEVICE.name)}" @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_hvac_mode( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate hvac mode service.""" await init_integration(hass) @@ -92,7 +92,7 @@ async def test_climate_hvac_mode( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_temperature( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate temperature service.""" await init_integration(hass) @@ -144,7 +144,7 @@ async def test_climate_temperature( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_fan_level( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate fan level service.""" await init_integration(hass) @@ -179,7 +179,7 @@ async def test_climate_fan_level( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_swing( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test climate swing service.""" await init_integration(hass) @@ -234,9 +234,7 @@ async def test_climate_swing( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) -async def test_control_device_fail( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch -) -> None: +async def test_control_device_fail(hass: HomeAssistant, mock_bridge, mock_api) -> None: """Test control device fail.""" await init_integration(hass) assert mock_bridge @@ -295,7 +293,7 @@ async def test_control_device_fail( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_bad_update_discard( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test that a bad update from device is discarded.""" await init_integration(hass) @@ -318,7 +316,7 @@ async def test_bad_update_discard( @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) async def test_climate_control_errors( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test control with settings not supported by device.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 07f349d1a72..57e2f98915e 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -31,7 +31,9 @@ ENTITY_ID = f"{COVER_DOMAIN}.{slugify(DEVICE.name)}" @pytest.mark.parametrize("mock_bridge", [[DEVICE]], indirect=True) -async def test_cover(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: +async def test_cover( + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch +) -> None: """Test cover services.""" await init_integration(hass) assert mock_bridge diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index f49ab99ba6c..c8df4dd0b83 100644 --- 
a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -1,17 +1,23 @@ """Tests for the diagnostics data provided by Switcher.""" +import pytest + from homeassistant.components.diagnostics import REDACTED from homeassistant.core import HomeAssistant from . import init_integration from .consts import DUMMY_WATER_HEATER_DEVICE +from tests.common import ANY from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_diagnostics( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_bridge, monkeypatch + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_bridge, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test diagnostics.""" entry = await init_integration(hass) @@ -59,5 +65,7 @@ async def test_diagnostics( "source": "user", "unique_id": "switcher_kis", "disabled_by": None, + "created_at": ANY, + "modified_at": ANY, }, } diff --git a/tests/components/switcher_kis/test_sensor.py b/tests/components/switcher_kis/test_sensor.py index 1be2efed987..8ccc33f2d37 100644 --- a/tests/components/switcher_kis/test_sensor.py +++ b/tests/components/switcher_kis/test_sensor.py @@ -74,7 +74,9 @@ async def test_sensor_disabled( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_sensor_update(hass: HomeAssistant, mock_bridge, monkeypatch) -> None: +async def test_sensor_update( + hass: HomeAssistant, mock_bridge, monkeypatch: pytest.MonkeyPatch +) -> None: """Test sensor update.""" await init_integration(hass) assert mock_bridge diff --git a/tests/components/switcher_kis/test_services.py b/tests/components/switcher_kis/test_services.py index 039daec4c97..26c54ee53ed 100644 --- a/tests/components/switcher_kis/test_services.py +++ b/tests/components/switcher_kis/test_services.py @@ -30,7 +30,7 @@ from .consts import ( @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) async def test_turn_on_with_timer_service( - hass: HomeAssistant, mock_bridge, mock_api, monkeypatch + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch ) -> None: """Test the turn on with timer service.""" await init_integration(hass) diff --git a/tests/components/switcher_kis/test_switch.py b/tests/components/switcher_kis/test_switch.py index 058546ac2ae..f14a8f5b1ca 100644 --- a/tests/components/switcher_kis/test_switch.py +++ b/tests/components/switcher_kis/test_switch.py @@ -23,7 +23,9 @@ from .consts import DUMMY_PLUG_DEVICE, DUMMY_WATER_HEATER_DEVICE @pytest.mark.parametrize("mock_bridge", [[DUMMY_WATER_HEATER_DEVICE]], indirect=True) -async def test_switch(hass: HomeAssistant, mock_bridge, mock_api, monkeypatch) -> None: +async def test_switch( + hass: HomeAssistant, mock_bridge, mock_api, monkeypatch: pytest.MonkeyPatch +) -> None: """Test the switch.""" await init_integration(hass) assert mock_bridge @@ -75,7 +77,7 @@ async def test_switch_control_fail( hass: HomeAssistant, mock_bridge, mock_api, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture, ) -> None: """Test switch control fail.""" diff --git a/tests/components/synology_dsm/conftest.py b/tests/components/synology_dsm/conftest.py index 2f05d0187be..0e8f79ffd40 100644 --- a/tests/components/synology_dsm/conftest.py +++ b/tests/components/synology_dsm/conftest.py @@ -1,9 +1,9 @@ """Configure Synology DSM tests.""" +from collections.abc import Generator from unittest.mock import 
AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index 433a4b15c23..f7ab26997ba 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -4,6 +4,7 @@ from pathlib import Path import tempfile from unittest.mock import AsyncMock, MagicMock, patch +from aiohttp import web import pytest from synology_dsm.api.photos import SynoPhotosAlbum, SynoPhotosItem from synology_dsm.exceptions import SynologyDSMException @@ -30,7 +31,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import HomeAssistant -from homeassistant.util.aiohttp import MockRequest, web +from homeassistant.util.aiohttp import MockRequest from .consts import HOST, MACS, PASSWORD, PORT, USE_SSL, USERNAME diff --git a/tests/components/system_bridge/__init__.py b/tests/components/system_bridge/__init__.py index edbe5469705..0606ce8e258 100644 --- a/tests/components/system_bridge/__init__.py +++ b/tests/components/system_bridge/__init__.py @@ -1,38 +1,52 @@ """Tests for the System Bridge integration.""" from collections.abc import Awaitable, Callable -from dataclasses import asdict from ipaddress import ip_address from typing import Any -from systembridgeconnector.const import TYPE_DATA_UPDATE -from systembridgemodels.const import MODEL_SYSTEM -from systembridgemodels.modules import System -from systembridgemodels.response import Response +from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY +from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU +from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS +from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS +from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS +from systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA +from systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY +from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES +from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM +from systembridgemodels.modules import Module, ModulesData from homeassistant.components import zeroconf from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from homeassistant.core import HomeAssistant -FIXTURE_MAC_ADDRESS = "aa:bb:cc:dd:ee:ff" -FIXTURE_UUID = "e91bf575-56f3-4c83-8f42-70ac17adcd33" +from tests.common import MockConfigEntry -FIXTURE_AUTH_INPUT = {CONF_TOKEN: "abc-123-def-456-ghi"} +FIXTURE_TITLE = "TestSystem" + +FIXTURE_REQUEST_ID = "test" + +FIXTURE_MAC_ADDRESS = FIXTURE_SYSTEM.mac_address +FIXTURE_UUID = FIXTURE_SYSTEM.uuid + +FIXTURE_AUTH_INPUT = { + CONF_TOKEN: "abc-123-def-456-ghi", +} FIXTURE_USER_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: "test-bridge", + CONF_HOST: "127.0.0.1", CONF_PORT: "9170", } FIXTURE_ZEROCONF_INPUT = { CONF_TOKEN: "abc-123-def-456-ghi", - CONF_HOST: "1.1.1.1", + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], CONF_PORT: "9170", } FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], + ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), + ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -41,7 +55,7 @@ FIXTURE_ZEROCONF = 
zeroconf.ZeroconfServiceInfo( "address": "http://test-bridge:9170", "fqdn": "test-bridge", "host": "test-bridge", - "ip": "1.1.1.1", + "ip": FIXTURE_USER_INPUT[CONF_HOST], "mac": FIXTURE_MAC_ADDRESS, "port": "9170", "uuid": FIXTURE_UUID, @@ -49,8 +63,8 @@ FIXTURE_ZEROCONF = zeroconf.ZeroconfServiceInfo( ) FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("1.1.1.1"), - ip_addresses=[ip_address("1.1.1.1")], + ip_address=ip_address(FIXTURE_USER_INPUT[CONF_HOST]), + ip_addresses=[ip_address(FIXTURE_USER_INPUT[CONF_HOST])], port=9170, hostname="test-bridge.local.", type="_system-bridge._tcp.local.", @@ -60,57 +74,37 @@ FIXTURE_ZEROCONF_BAD = zeroconf.ZeroconfServiceInfo( }, ) - -FIXTURE_SYSTEM = System( - boot_time=1, - fqdn="", - hostname="1.1.1.1", - ip_address_4="1.1.1.1", - mac_address=FIXTURE_MAC_ADDRESS, - platform="", - platform_version="", - uptime=1, - uuid=FIXTURE_UUID, - version="", - version_latest="", - version_newer_available=False, - users=[], +FIXTURE_DATA_RESPONSE = ModulesData( + system=FIXTURE_SYSTEM, ) -FIXTURE_DATA_RESPONSE = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data=asdict(FIXTURE_SYSTEM), -) -FIXTURE_DATA_RESPONSE_BAD = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data={}, -) +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> bool: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) -FIXTURE_DATA_RESPONSE_BAD = Response( - id="1234", - type=TYPE_DATA_UPDATE, - subtype=None, - message="Data received", - module=MODEL_SYSTEM, - data={}, -) + setup_result = await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return setup_result async def mock_data_listener( - self, callback: Callable[[str, Any], Awaitable[None]] | None = None, _: bool = False, ): """Mock websocket data listener.""" if callback is not None: # Simulate data received from the websocket - await callback(MODEL_SYSTEM, FIXTURE_SYSTEM) + await callback(Module.BATTERY, FIXTURE_BATTERY) + await callback(Module.CPU, FIXTURE_CPU) + await callback(Module.DISKS, FIXTURE_DISKS) + await callback(Module.DISPLAYS, FIXTURE_DISPLAYS) + await callback(Module.GPUS, FIXTURE_GPUS) + await callback(Module.MEDIA, FIXTURE_MEDIA) + await callback(Module.MEMORY, FIXTURE_MEMORY) + await callback(Module.PROCESSES, FIXTURE_PROCESSES) + await callback(Module.SYSTEM, FIXTURE_SYSTEM) diff --git a/tests/components/system_bridge/conftest.py b/tests/components/system_bridge/conftest.py new file mode 100644 index 00000000000..2f1f87485e7 --- /dev/null +++ b/tests/components/system_bridge/conftest.py @@ -0,0 +1,195 @@ +"""Fixtures for System Bridge integration tests.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Final +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from systembridgeconnector.const import EventKey, EventType +from systembridgemodels.fixtures.modules.battery import FIXTURE_BATTERY +from systembridgemodels.fixtures.modules.cpu import FIXTURE_CPU +from systembridgemodels.fixtures.modules.disks import FIXTURE_DISKS +from systembridgemodels.fixtures.modules.displays import FIXTURE_DISPLAYS +from systembridgemodels.fixtures.modules.gpus import FIXTURE_GPUS +from systembridgemodels.fixtures.modules.media import FIXTURE_MEDIA +from 
systembridgemodels.fixtures.modules.memory import FIXTURE_MEMORY +from systembridgemodels.fixtures.modules.networks import FIXTURE_NETWORKS +from systembridgemodels.fixtures.modules.processes import FIXTURE_PROCESSES +from systembridgemodels.fixtures.modules.sensors import FIXTURE_SENSORS +from systembridgemodels.fixtures.modules.system import FIXTURE_SYSTEM +from systembridgemodels.media_directories import MediaDirectory +from systembridgemodels.media_files import MediaFile, MediaFiles +from systembridgemodels.modules import Module, ModulesData, RegisterDataListener +from systembridgemodels.response import Response + +from homeassistant.components.system_bridge.config_flow import SystemBridgeConfigFlow +from homeassistant.components.system_bridge.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from homeassistant.core import HomeAssistant + +from . import ( + FIXTURE_REQUEST_ID, + FIXTURE_TITLE, + FIXTURE_USER_INPUT, + FIXTURE_UUID, + mock_data_listener, + setup_integration, +) + +from tests.common import MockConfigEntry + +REGISTER_MODULES: Final[list[Module]] = [ + Module.SYSTEM, +] + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock ConfigEntry.""" + return MockConfigEntry( + title=FIXTURE_TITLE, + domain=DOMAIN, + unique_id=FIXTURE_UUID, + version=SystemBridgeConfigFlow.VERSION, + minor_version=SystemBridgeConfigFlow.MINOR_VERSION, + data={ + CONF_HOST: FIXTURE_USER_INPUT[CONF_HOST], + CONF_PORT: FIXTURE_USER_INPUT[CONF_PORT], + CONF_TOKEN: FIXTURE_USER_INPUT[CONF_TOKEN], + }, + ) + + +@pytest.fixture(autouse=True) +def mock_setup_notify_platform() -> Generator[AsyncMock]: + """Mock notify platform setup.""" + with patch( + "homeassistant.helpers.discovery.async_load_platform", + ) as mock_setup_notify_platform: + yield mock_setup_notify_platform + + +@pytest.fixture +def mock_version() -> Generator[AsyncMock]: + """Return a mocked Version class.""" + with patch( + "homeassistant.components.system_bridge.Version", + autospec=True, + ) as mock_version: + version = mock_version.return_value + version.check_supported.return_value = True + + yield version + + +@pytest.fixture +def mock_websocket_client( + register_data_listener_model: RegisterDataListener = RegisterDataListener( + modules=REGISTER_MODULES, + ), +) -> Generator[MagicMock]: + """Return a mocked WebSocketClient client.""" + + with ( + patch( + "homeassistant.components.system_bridge.coordinator.WebSocketClient", + autospec=True, + ) as mock_websocket_client, + patch( + "homeassistant.components.system_bridge.config_flow.WebSocketClient", + new=mock_websocket_client, + ), + ): + websocket_client = mock_websocket_client.return_value + websocket_client.connected = False + websocket_client.get_data.return_value = ModulesData( + battery=FIXTURE_BATTERY, + cpu=FIXTURE_CPU, + disks=FIXTURE_DISKS, + displays=FIXTURE_DISPLAYS, + gpus=FIXTURE_GPUS, + media=FIXTURE_MEDIA, + memory=FIXTURE_MEMORY, + networks=FIXTURE_NETWORKS, + processes=FIXTURE_PROCESSES, + sensors=FIXTURE_SENSORS, + system=FIXTURE_SYSTEM, + ) + websocket_client.register_data_listener.return_value = Response( + id=FIXTURE_REQUEST_ID, + type=EventType.DATA_LISTENER_REGISTERED, + message="Data listener registered", + data={EventKey.MODULES: register_data_listener_model.modules}, + ) + # Trigger callback when listener is registered + websocket_client.listen.side_effect = mock_data_listener + + websocket_client.get_directories.return_value = [ + 
MediaDirectory( + key="documents", + path="/home/user/documents", + ) + ] + websocket_client.get_files.return_value = MediaFiles( + files=[ + MediaFile( + name="testsubdirectory", + path="testsubdirectory", + fullpath="/home/user/documents/testsubdirectory", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=True, + is_file=False, + is_link=False, + ), + MediaFile( + name="testfile.txt", + path="testfile.txt", + fullpath="/home/user/documents/testfile.txt", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=False, + is_file=True, + is_link=False, + mime_type="text/plain", + ), + MediaFile( + name="testfile.jpg", + path="testfile.jpg", + fullpath="/home/user/documents/testimage.jpg", + size=100, + last_accessed=1630000000, + created=1630000000, + modified=1630000000, + is_directory=False, + is_file=True, + is_link=False, + mime_type="image/jpeg", + ), + ], + path="", + ) + + yield websocket_client + + +@pytest.fixture +async def init_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_version: MagicMock, + mock_websocket_client: MagicMock, +) -> MockConfigEntry: + """Initialize the System Bridge integration.""" + assert await setup_integration(hass, mock_config_entry) + + assert mock_config_entry.state is ConfigEntryState.LOADED + + return mock_config_entry diff --git a/tests/components/system_bridge/snapshots/test_media_source.ambr b/tests/components/system_bridge/snapshots/test_media_source.ambr new file mode 100644 index 00000000000..53e0e8416e9 --- /dev/null +++ b/tests/components/system_bridge/snapshots/test_media_source.ambr @@ -0,0 +1,61 @@ +# serializer version: 1 +# name: test_directory[system_bridge_media_source_directory] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem - documents', + }) +# --- +# name: test_entry[system_bridge_media_source_entry] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem', + }) +# --- +# name: test_file[system_bridge_media_source_file_image] + dict({ + 'mime_type': 'image/jpeg', + 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testimage.jpg', + }) +# --- +# name: test_file[system_bridge_media_source_file_text] + dict({ + 'mime_type': 'text/plain', + 'url': 'http://127.0.0.1:9170/api/media/file/data?token=abc-123-def-456-ghi&base=documents&path=testfile.txt', + }) +# --- +# name: test_root[system_bridge_media_source_root] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'System Bridge', + }) +# --- +# name: test_subdirectory[system_bridge_media_source_subdirectory] + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_type': '', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'TestSystem - documents/testsubdirectory', + }) +# --- diff --git a/tests/components/system_bridge/test_config_flow.py b/tests/components/system_bridge/test_config_flow.py index 16a6f5d0f56..727d93de893 100644 --- a/tests/components/system_bridge/test_config_flow.py +++ b/tests/components/system_bridge/test_config_flow.py @@ -69,7 +69,7 @@ async 
def test_user_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "test-bridge" + assert result2["title"] == "127.0.0.1" assert result2["data"] == FIXTURE_USER_INPUT assert len(mock_setup_entry.mock_calls) == 1 @@ -441,7 +441,7 @@ async def test_zeroconf_flow(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "1.1.1.1" + assert result2["title"] == "127.0.0.1" assert result2["data"] == FIXTURE_ZEROCONF_INPUT assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/system_bridge/test_media_source.py b/tests/components/system_bridge/test_media_source.py new file mode 100644 index 00000000000..161d69569b6 --- /dev/null +++ b/tests/components/system_bridge/test_media_source.py @@ -0,0 +1,148 @@ +"""Test the System Bridge integration.""" + +import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import paths + +from homeassistant.components.media_player.errors import BrowseError +from homeassistant.components.media_source import ( + DOMAIN as MEDIA_SOURCE_DOMAIN, + URI_SCHEME, + async_browse_media, + async_resolve_media, +) +from homeassistant.components.system_bridge.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def setup_component(hass: HomeAssistant) -> None: + """Set up component.""" + assert await async_setup_component( + hass, + MEDIA_SOURCE_DOMAIN, + {}, + ) + + +async def test_root( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test root media browsing.""" + browse_media_root = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}", + ) + + assert browse_media_root.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_root", + exclude=paths("children", "media_content_id"), + ) + + +async def test_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing entry.""" + browse_media_entry = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}", + ) + + assert browse_media_entry.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_entry", + exclude=paths("children", "media_content_id"), + ) + + +async def test_directory( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing directory.""" + browse_media_directory = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents", + ) + + assert browse_media_directory.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_directory", + exclude=paths("children", "media_content_id"), + ) + + +async def test_subdirectory( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + """Test browsing directory.""" + browse_media_directory = await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testsubdirectory", + ) + + assert browse_media_directory.as_dict() == snapshot( + name=f"{DOMAIN}_media_source_subdirectory", + exclude=paths("children", "media_content_id"), + ) + + +async def test_file( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + init_integration: MockConfigEntry, +) -> None: + 
"""Test browsing file.""" + resolve_media_file = await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testfile.txt~~text/plain", + None, + ) + + assert resolve_media_file == snapshot( + name=f"{DOMAIN}_media_source_file_text", + ) + + resolve_media_file = await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/{init_integration.entry_id}~~documents/testimage.jpg~~image/jpeg", + None, + ) + + assert resolve_media_file == snapshot( + name=f"{DOMAIN}_media_source_file_image", + ) + + +async def test_bad_entry( + hass: HomeAssistant, + init_integration: MockConfigEntry, +) -> None: + """Test invalid entry raises BrowseError.""" + with pytest.raises(BrowseError): + await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid", + ) + + with pytest.raises(BrowseError): + await async_browse_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory", + ) + + with pytest.raises(ValueError): + await async_resolve_media( + hass, + f"{URI_SCHEME}{DOMAIN}/badentryid~~baddirectory/badfile.txt~~text/plain", + None, + ) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index 918d995fab9..fb46d120acf 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -10,10 +10,10 @@ import traceback from typing import Any from unittest.mock import MagicMock, patch -from homeassistant.bootstrap import async_setup_component from homeassistant.components import system_log from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.typing import ConfigType +from homeassistant.setup import async_setup_component from tests.common import async_capture_events from tests.typing import WebSocketGenerator diff --git a/tests/components/systemmonitor/conftest.py b/tests/components/systemmonitor/conftest.py index 25611481433..5f0a7a5c76d 100644 --- a/tests/components/systemmonitor/conftest.py +++ b/tests/components/systemmonitor/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import socket from unittest.mock import AsyncMock, Mock, NonCallableMock, patch from psutil import NoSuchProcess, Process from psutil._common import sdiskpart, sdiskusage, shwtemp, snetio, snicaddr, sswap import pytest -from typing_extensions import Generator from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory diff --git a/tests/components/systemmonitor/test_diagnostics.py b/tests/components/systemmonitor/test_diagnostics.py index 78128aad5f4..b0f4fca3d0c 100644 --- a/tests/components/systemmonitor/test_diagnostics.py +++ b/tests/components/systemmonitor/test_diagnostics.py @@ -23,4 +23,4 @@ async def test_diagnostics( """Test diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, mock_added_config_entry - ) == snapshot(exclude=props("last_update", "entry_id")) + ) == snapshot(exclude=props("last_update", "entry_id", "created_at", "modified_at")) diff --git a/tests/components/tag/test_trigger.py b/tests/components/tag/test_trigger.py index 60d45abb7b9..5c7e515d322 100644 --- a/tests/components/tag/test_trigger.py +++ b/tests/components/tag/test_trigger.py @@ -11,8 +11,6 @@ from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.setup import async_setup_component -from tests.common import async_mock_service - 
@pytest.fixture(autouse=True, name="stub_blueprint_populate") def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @@ -39,14 +37,8 @@ def tag_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_triggers( - hass: HomeAssistant, tag_setup, calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] ) -> None: """Test tag triggers.""" assert await tag_setup() @@ -75,9 +67,9 @@ async def test_triggers( await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" - assert calls[0].data["id"] == 0 + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" + assert service_calls[0].data["id"] == 0 await hass.services.async_call( automation.DOMAIN, @@ -85,15 +77,16 @@ async def test_triggers( {ATTR_ENTITY_ID: "automation.test"}, blocking=True, ) + assert len(service_calls) == 2 await async_scan_tag(hass, "abc123", None) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_exception_bad_trigger( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test for exception on event triggers firing.""" @@ -117,7 +110,7 @@ async def test_exception_bad_trigger( async def test_multiple_tags_and_devices_trigger( - hass: HomeAssistant, tag_setup, calls: list[ServiceCall] + hass: HomeAssistant, tag_setup, service_calls: list[ServiceCall] ) -> None: """Test multiple tags and devices triggers.""" assert await tag_setup() @@ -158,8 +151,8 @@ async def test_multiple_tags_and_devices_trigger( await async_scan_tag(hass, "def456", device_id="jkl0123") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[0].data["message"] == "service called" - assert calls[1].data["message"] == "service called" - assert calls[2].data["message"] == "service called" - assert calls[3].data["message"] == "service called" + assert len(service_calls) == 4 + assert service_calls[0].data["message"] == "service called" + assert service_calls[1].data["message"] == "service called" + assert service_calls[2].data["message"] == "service called" + assert service_calls[3].data["message"] == "service called" diff --git a/tests/components/tailscale/conftest.py b/tests/components/tailscale/conftest.py index cb7419daf89..5514678f530 100644 --- a/tests/components/tailscale/conftest.py +++ b/tests/components/tailscale/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest from tailscale.models import Devices -from typing_extensions import Generator from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/tailwind/conftest.py b/tests/components/tailwind/conftest.py index f23463548bc..ea87c120308 100644 --- a/tests/components/tailwind/conftest.py +++ b/tests/components/tailwind/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations -from unittest.mock import AsyncMock, MagicMock, patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch from gotailwind 
import TailwindDeviceStatus import pytest -from typing_extensions import Generator from homeassistant.components.tailwind.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_TOKEN @@ -36,7 +36,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[None]: """Mock setting up a config entry.""" with patch( "homeassistant.components.tailwind.async_setup_entry", return_value=True diff --git a/tests/components/tailwind/snapshots/test_binary_sensor.ambr b/tests/components/tailwind/snapshots/test_binary_sensor.ambr index 20a3282db55..064b391c43a 100644 --- a/tests/components/tailwind/snapshots/test_binary_sensor.ambr +++ b/tests/components/tailwind/snapshots/test_binary_sensor.ambr @@ -68,6 +68,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 1', 'name_by_user': None, 'primary_config_entry': , @@ -146,6 +147,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_button.ambr b/tests/components/tailwind/snapshots/test_button.ambr index 3ddbbb3f81d..17b656ec5fd 100644 --- a/tests/components/tailwind/snapshots/test_button.ambr +++ b/tests/components/tailwind/snapshots/test_button.ambr @@ -72,6 +72,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_cover.ambr b/tests/components/tailwind/snapshots/test_cover.ambr index 4ac6d6adc7d..b69bd9e6410 100644 --- a/tests/components/tailwind/snapshots/test_cover.ambr +++ b/tests/components/tailwind/snapshots/test_cover.ambr @@ -69,6 +69,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 1', 'name_by_user': None, 'primary_config_entry': , @@ -148,6 +149,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Door 2', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tailwind/snapshots/test_number.ambr b/tests/components/tailwind/snapshots/test_number.ambr index b4e73f4b2aa..3e2e0577ad5 100644 --- a/tests/components/tailwind/snapshots/test_number.ambr +++ b/tests/components/tailwind/snapshots/test_number.ambr @@ -81,6 +81,7 @@ }), 'manufacturer': 'Tailwind', 'model': 'iQ3', + 'model_id': None, 'name': 'Tailwind iQ3', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tami4/conftest.py b/tests/components/tami4/conftest.py index 84b96c04735..2b4acac0b3f 100644 --- a/tests/components/tami4/conftest.py +++ b/tests/components/tami4/conftest.py @@ -1,5 +1,6 @@ """Common fixutres with default mocks as well as common test helper methods.""" +from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, MagicMock, patch @@ -7,7 +8,6 @@ import pytest from Tami4EdgeAPI.device import Device from Tami4EdgeAPI.device_metadata import DeviceMetadata from Tami4EdgeAPI.water_quality import UV, Filter, WaterQuality -from typing_extensions import Generator from homeassistant.components.tami4.const import CONF_REFRESH_TOKEN, DOMAIN from homeassistant.core import HomeAssistant @@ -60,6 +60,31 @@ def mock__get_devices_metadata(request: pytest.FixtureRequest) -> Generator[None yield +@pytest.fixture +def mock__get_devices_metadata_no_name( + request: pytest.FixtureRequest, +) -> Generator[None]: + 
"""Fixture to mock _get_devices which makes a call to the API.""" + + side_effect = getattr(request, "param", None) + + device_metadata = DeviceMetadata( + id=1, + name=None, + connected=True, + psn="psn", + type="type", + device_firmware="v1.1", + ) + + with patch( + "Tami4EdgeAPI.Tami4EdgeAPI.Tami4EdgeAPI._get_devices_metadata", + return_value=[device_metadata], + side_effect=side_effect, + ): + yield + + @pytest.fixture def mock_get_device( request: pytest.FixtureRequest, diff --git a/tests/components/tami4/test_config_flow.py b/tests/components/tami4/test_config_flow.py index 4210c391d70..4dfc27bba94 100644 --- a/tests/components/tami4/test_config_flow.py +++ b/tests/components/tami4/test_config_flow.py @@ -120,6 +120,39 @@ async def test_step_otp_valid( assert "refresh_token" in result["data"] +@pytest.mark.usefixtures( + "mock_setup_entry", + "mock_request_otp", + "mock_submit_otp", + "mock__get_devices_metadata_no_name", +) +async def test_step_otp_valid_device_no_name(hass: HomeAssistant) -> None: + """Test user step with valid phone number.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PHONE: "+972555555555"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"otp": "123456"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Tami4" + assert "refresh_token" in result["data"] + + @pytest.mark.parametrize( ("mock_submit_otp", "expected_error"), [ diff --git a/tests/components/tankerkoenig/conftest.py b/tests/components/tankerkoenig/conftest.py index 8f2e2c2fb53..1517c3d2060 100644 --- a/tests/components/tankerkoenig/conftest.py +++ b/tests/components/tankerkoenig/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Tankerkoenig integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tankerkoenig import DOMAIN from homeassistant.const import CONF_SHOW_ON_MAP diff --git a/tests/components/tankerkoenig/test_diagnostics.py b/tests/components/tankerkoenig/test_diagnostics.py index 441268659f3..e7b479a0c32 100644 --- a/tests/components/tankerkoenig/test_diagnostics.py +++ b/tests/components/tankerkoenig/test_diagnostics.py @@ -4,6 +4,7 @@ from __future__ import annotations import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,4 +22,4 @@ async def test_entry_diagnostics( ) -> None: """Test config entry diagnostics.""" result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py index 07ca8b31825..48cd4012f07 100644 --- a/tests/components/tasmota/conftest.py +++ b/tests/components/tasmota/conftest.py @@ -10,35 +10,11 @@ from homeassistant.components.tasmota.const import ( DEFAULT_PREFIX, DOMAIN, ) -from homeassistant.core import HomeAssistant, ServiceCall -from tests.common import ( - 
MockConfigEntry, - async_mock_service, - mock_device_registry, - mock_registry, -) +from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 -@pytest.fixture -def device_reg(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture -def entity_reg(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) def disable_debounce(): """Set MQTT debounce timer to zero.""" diff --git a/tests/components/tasmota/test_device_trigger.py b/tests/components/tasmota/test_device_trigger.py index 450ad678ff6..bb474358006 100644 --- a/tests/components/tasmota/test_device_trigger.py +++ b/tests/components/tasmota/test_device_trigger.py @@ -30,8 +30,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: async def test_get_triggers_btn( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -46,7 +45,7 @@ async def test_get_triggers_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -77,8 +76,7 @@ async def test_get_triggers_btn( async def test_get_triggers_swc( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -90,7 +88,7 @@ async def test_get_triggers_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) expected_triggers = [ @@ -112,8 +110,7 @@ async def test_get_triggers_swc( async def test_get_unknown_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -126,7 +123,7 @@ async def test_get_unknown_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -161,8 +158,7 @@ async def test_get_unknown_triggers( async def test_get_non_existing_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -175,7 +171,7 @@ async def test_get_non_existing_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -187,8 +183,7 @@ async def test_get_non_existing_triggers( @pytest.mark.no_fail_on_log_exception async def test_discover_bad_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: 
MqttMockHAClient, setup_tasmota, ) -> None: @@ -207,7 +202,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -243,7 +238,7 @@ async def test_discover_bad_triggers( ) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) triggers = await async_get_device_automations( @@ -274,8 +269,7 @@ async def test_discover_bad_triggers( async def test_update_remove_triggers( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -296,7 +290,7 @@ async def test_update_remove_triggers( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -351,8 +345,8 @@ async def test_update_remove_triggers( async def test_if_fires_on_mqtt_message_btn( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -366,7 +360,7 @@ async def test_if_fires_on_mqtt_message_btn( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -412,22 +406,22 @@ async def test_if_fires_on_mqtt_message_btn( hass, "tasmota_49A3BC/stat/RESULT", '{"Button1":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press_1" # Fake button 3 single press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Button3":{"Action":"SINGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "short_press_3" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "short_press_3" async def test_if_fires_on_mqtt_message_swc( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -442,7 +436,7 @@ async def test_if_fires_on_mqtt_message_swc( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -502,30 +496,30 @@ async def test_if_fires_on_mqtt_message_swc( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press_1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press_1" # Fake switch 2 short press. 
async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch2":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "short_press_2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "short_press_2" # Fake switch 3 long press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "long_press_3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "long_press_3" async def test_if_fires_on_mqtt_message_late_discover( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -544,7 +538,7 @@ async def test_if_fires_on_mqtt_message_late_discover( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -593,22 +587,22 @@ async def test_if_fires_on_mqtt_message_late_discover( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "short_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "short_press" # Fake long press. async_fire_mqtt_message( hass, "tasmota_49A3BC/stat/RESULT", '{"custom_switch":{"Action":"HOLD"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "double_press" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "double_press" async def test_if_fires_on_mqtt_message_after_update( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -624,7 +618,7 @@ async def test_if_fires_on_mqtt_message_after_update( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -656,7 +650,7 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Update the trigger with different topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -666,13 +660,13 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 # Update the trigger with same topic async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) @@ -682,17 +676,20 @@ async def test_if_fires_on_mqtt_message_after_update( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await 
hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async_fire_mqtt_message( hass, "tasmota_49A3BC/status/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 3 + assert len(service_calls) == 3 async def test_no_resubscribe_same_topic( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test subscription to topics without change.""" # Discover a device with device trigger @@ -705,7 +702,7 @@ async def test_no_resubscribe_same_topic( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -741,8 +738,8 @@ async def test_no_resubscribe_same_topic( async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass: HomeAssistant, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -757,7 +754,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -789,7 +786,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the trigger config["swc"][0] = -1 @@ -800,7 +797,7 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Rediscover the trigger config["swc"][0] = 0 @@ -811,14 +808,14 @@ async def test_not_fires_on_mqtt_message_after_remove_by_mqtt( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_reg, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -834,7 +831,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -866,7 +863,7 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 # Remove the device await remove_device(hass, hass_ws_client, device_entry.id) @@ -876,11 +873,14 @@ async def test_not_fires_on_mqtt_message_after_remove_from_registry( hass, "tasmota_49A3BC/stat/RESULT", 
'{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of trigger.""" # Discover a device with device trigger @@ -893,14 +893,14 @@ async def test_attach_remove( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -925,8 +925,8 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -937,11 +937,14 @@ async def test_attach_remove( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove_late( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of trigger.""" # Discover a device without device triggers @@ -956,14 +959,14 @@ async def test_attach_remove_late( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -988,7 +991,7 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config2)) await hass.async_block_till_done() @@ -998,8 +1001,8 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the trigger remove() @@ -1010,11 +1013,14 @@ async def test_attach_remove_late( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_attach_remove_late2( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: 
"""Test attach and removal of trigger.""" # Discover a device without device triggers @@ -1029,14 +1035,14 @@ async def test_attach_remove_late2( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) remove = await async_initialize_triggers( hass, @@ -1068,11 +1074,14 @@ async def test_attach_remove_late2( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_attach_remove_unknown1( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test attach and removal of unknown trigger.""" # Discover a device without device triggers @@ -1083,7 +1092,7 @@ async def test_attach_remove_unknown1( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1113,7 +1122,7 @@ async def test_attach_remove_unknown1( async def test_attach_unknown_remove_device_from_registry( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - device_reg, + device_registry: dr.DeviceRegistry, mqtt_mock: MqttMockHAClient, setup_tasmota, ) -> None: @@ -1136,7 +1145,7 @@ async def test_attach_unknown_remove_device_from_registry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config1)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) @@ -1164,7 +1173,10 @@ async def test_attach_unknown_remove_device_from_registry( async def test_attach_remove_config_entry( - hass: HomeAssistant, device_reg, mqtt_mock: MqttMockHAClient, setup_tasmota + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mqtt_mock: MqttMockHAClient, + setup_tasmota, ) -> None: """Test trigger cleanup when removing a Tasmota config entry.""" # Discover a device with device trigger @@ -1177,14 +1189,14 @@ async def test_attach_remove_config_entry( async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{mac}/config", json.dumps(config)) await hass.async_block_till_done() - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) - calls = [] + service_calls = [] def callback(trigger, context): - calls.append(trigger["trigger"]["description"]) + service_calls.append(trigger["trigger"]["description"]) await async_initialize_triggers( hass, @@ -1209,8 +1221,8 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0] == "event 'tasmota_event'" + assert len(service_calls) == 1 + assert service_calls[0] == "event 'tasmota_event'" # Remove the Tasmota config entry config_entries = hass.config_entries.async_entries("tasmota") @@ 
-1222,4 +1234,4 @@ async def test_attach_remove_config_entry( hass, "tasmota_49A3BC/stat/RESULT", '{"Switch1":{"Action":"TOGGLE"}}' ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 diff --git a/tests/components/tasmota/test_discovery.py b/tests/components/tasmota/test_discovery.py index 91832f1f2f0..35ea79f7749 100644 --- a/tests/components/tasmota/test_discovery.py +++ b/tests/components/tasmota/test_discovery.py @@ -124,9 +124,8 @@ async def test_invalid_mac( async def test_correct_config_discovery( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, ) -> None: """Test receiving valid discovery message.""" @@ -142,11 +141,11 @@ async def test_correct_config_discovery( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None - entity_entry = entity_reg.async_get("switch.tasmota_test") + entity_entry = entity_registry.async_get("switch.tasmota_test") assert entity_entry is not None state = hass.states.get("switch.tasmota_test") @@ -159,9 +158,7 @@ async def test_correct_config_discovery( async def test_device_discover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test setting up a device.""" @@ -176,7 +173,7 @@ async def test_device_discover( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -190,9 +187,7 @@ async def test_device_discover( async def test_device_discover_deprecated( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test setting up a device with deprecated discovery message.""" @@ -207,7 +202,7 @@ async def test_device_discover_deprecated( await hass.async_block_till_done() # Verify device and registry entries are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -220,9 +215,7 @@ async def test_device_discover_deprecated( async def test_device_update( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test updating a device.""" @@ -240,7 +233,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -258,7 +251,7 @@ async def test_device_update( await hass.async_block_till_done() # Verify device entry is updated - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -270,9 +263,7 @@ async def 
test_device_update( async def test_device_remove( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -287,7 +278,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -300,7 +291,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -309,9 +300,7 @@ async def test_device_remove( async def test_device_remove_multiple_config_entries_1( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -321,7 +310,7 @@ async def test_device_remove_multiple_config_entries_1( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -336,7 +325,7 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -350,7 +339,7 @@ async def test_device_remove_multiple_config_entries_1( await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -360,9 +349,7 @@ async def test_device_remove_multiple_config_entries_1( async def test_device_remove_multiple_config_entries_2( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device.""" @@ -372,12 +359,12 @@ async def test_device_remove_multiple_config_entries_2( mock_entry = MockConfigEntry(domain="test") mock_entry.add_to_hass(hass) - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) - other_device_entry = device_reg.async_get_or_create( + other_device_entry = device_registry.async_get_or_create( config_entry_id=mock_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "other_device")}, ) @@ -392,7 +379,7 @@ async def test_device_remove_multiple_config_entries_2( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -400,13 +387,13 @@ async def test_device_remove_multiple_config_entries_2( assert other_device_entry.id != device_entry.id # Remove other config entry from the device - device_reg.async_update_device( + 
device_registry.async_update_device( device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() # Verify device entry is not removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -414,7 +401,7 @@ async def test_device_remove_multiple_config_entries_2( mqtt_mock.async_publish.assert_not_called() # Remove other config entry from the other device - Tasmota should not do any cleanup - device_reg.async_update_device( + device_registry.async_update_device( other_device_entry.id, remove_config_entry_id=mock_entry.entry_id ) await hass.async_block_till_done() @@ -425,8 +412,7 @@ async def test_device_remove_stale( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a stale (undiscovered) device does not throw.""" @@ -436,13 +422,13 @@ async def test_device_remove_stale( config_entry = hass.config_entries.async_entries("tasmota")[0] # Create a device - device_reg.async_get_or_create( + device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) # Verify device entry was created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -451,7 +437,7 @@ async def test_device_remove_stale( await remove_device(hass, hass_ws_client, device_entry.id) # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -460,9 +446,7 @@ async def test_device_remove_stale( async def test_device_rediscover( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a device.""" @@ -477,7 +461,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created - device_entry1 = device_reg.async_get_device( + device_entry1 = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry1 is not None @@ -490,7 +474,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -503,7 +487,7 @@ async def test_device_rediscover( await hass.async_block_till_done() # Verify device entry is created, and id is reused - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -576,9 +560,8 @@ async def test_entity_duplicate_removal( async def test_same_topic( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -605,7 +588,7 @@ async def test_same_topic( # Verify device registry entries are created for both devices for 
config in configs[0:2]: - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -616,14 +599,14 @@ async def test_same_topic( assert device_entry.sw_version == config["sw"] # Verify entities are created only for the first device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[0]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 - device_entry = device_reg.async_get_device( + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_duplicated_tasmota_49A3BC/cmnd/" @@ -639,7 +622,7 @@ async def test_same_topic( await hass.async_block_till_done() # Verify device registry entries was created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) assert device_entry is not None @@ -650,10 +633,10 @@ async def test_same_topic( assert device_entry.sw_version == configs[2]["sw"] # Verify no entities were created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -669,10 +652,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the third device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[2]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been updated issue = issue_registry.async_get_issue("tasmota", issue_id) @@ -688,10 +671,10 @@ async def test_same_topic( await hass.async_block_till_done() # Verify entities are created also for the second device - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, configs[1]["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert issue_registry.async_get_issue("tasmota", issue_id) is None @@ -700,9 +683,8 @@ async def test_same_topic( async def test_topic_no_prefix( hass: HomeAssistant, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, setup_tasmota, issue_registry: ir.IssueRegistry, ) -> None: @@ -719,7 +701,7 @@ async def test_topic_no_prefix( await 
hass.async_block_till_done() # Verify device registry entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) assert device_entry is not None @@ -730,10 +712,10 @@ async def test_topic_no_prefix( assert device_entry.sw_version == config["sw"] # Verify entities are not created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 0 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 0 # Verify a repairs issue was created issue_id = "topic_no_prefix_00000049A3BC" @@ -749,10 +731,10 @@ async def test_topic_no_prefix( await hass.async_block_till_done() # Verify entities are created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, config["mac"])} ) - assert len(er.async_entries_for_device(entity_reg, device_entry.id, True)) == 1 + assert len(er.async_entries_for_device(entity_registry, device_entry.id, True)) == 1 # Verify the repairs issue has been removed assert ("tasmota", issue_id) not in issue_registry.issues diff --git a/tests/components/tasmota/test_fan.py b/tests/components/tasmota/test_fan.py index 654b8c955d2..49d1d36ce20 100644 --- a/tests/components/tasmota/test_fan.py +++ b/tests/components/tasmota/test_fan.py @@ -61,7 +61,12 @@ async def test_controlling_state_via_mqtt( state = hass.states.get("fan.tasmota") assert state.state == STATE_OFF assert state.attributes["percentage"] is None - assert state.attributes["supported_features"] == fan.FanEntityFeature.SET_SPEED + assert ( + state.attributes["supported_features"] + == fan.FanEntityFeature.SET_SPEED + | fan.FanEntityFeature.TURN_OFF + | fan.FanEntityFeature.TURN_ON + ) assert not state.attributes.get(ATTR_ASSUMED_STATE) async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/STATE", '{"FanSpeed":1}') diff --git a/tests/components/tasmota/test_init.py b/tests/components/tasmota/test_init.py index 0123421d5ae..125dba811e6 100644 --- a/tests/components/tasmota/test_init.py +++ b/tests/components/tasmota/test_init.py @@ -4,8 +4,6 @@ import copy import json from unittest.mock import call -import pytest - from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -26,9 +24,7 @@ async def test_device_remove( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, - caplog: pytest.LogCaptureFixture, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, setup_tasmota, ) -> None: """Test removing a discovered device through device registry.""" @@ -44,7 +40,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -53,7 +49,7 @@ async def test_device_remove( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -70,7 +66,7 @@ async def test_device_remove( async def 
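The updated fan assertion above expects supported_features to include TURN_ON and TURN_OFF alongside SET_SPEED. These feature constants combine as bitwise flags; the enum below is a simplified stand-in (not imported from homeassistant.components.fan) just to show how the combined value is built and checked:

from enum import IntFlag


class FanFeature(IntFlag):
    """Simplified stand-in for fan.FanEntityFeature."""

    SET_SPEED = 1
    OSCILLATE = 2
    DIRECTION = 4
    PRESET_MODE = 8
    TURN_OFF = 16
    TURN_ON = 32


supported = FanFeature.SET_SPEED | FanFeature.TURN_OFF | FanFeature.TURN_ON
assert supported == 49  # the integer stored in the state's supported_features attribute
assert FanFeature.TURN_ON in supported
assert FanFeature.OSCILLATE not in supported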
test_device_remove_non_tasmota_device( hass: HomeAssistant, - device_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -92,7 +88,7 @@ async def test_device_remove_non_tasmota_device( config_entry.add_to_hass(hass) mac = "12:34:56:AB:CD:EF" - device_entry = device_reg.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -102,7 +98,7 @@ async def test_device_remove_non_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -113,7 +109,7 @@ async def test_device_remove_non_tasmota_device( async def test_device_remove_stale_tasmota_device( hass: HomeAssistant, - device_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -123,7 +119,7 @@ async def test_device_remove_stale_tasmota_device( config_entry = hass.config_entries.async_entries("tasmota")[0] mac = "12:34:56:AB:CD:EF" - device_entry = device_reg.async_get_or_create( + device_entry = device_registry.async_get_or_create( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, mac)}, ) @@ -133,7 +129,7 @@ async def test_device_remove_stale_tasmota_device( await hass.async_block_till_done() # Verify device entry is removed - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None @@ -144,8 +140,7 @@ async def test_device_remove_stale_tasmota_device( async def test_tasmota_ws_remove_discovered_device( hass: HomeAssistant, - device_reg, - entity_reg, + device_registry: dr.DeviceRegistry, hass_ws_client: WebSocketGenerator, mqtt_mock: MqttMockHAClient, setup_tasmota, @@ -159,7 +154,7 @@ async def test_tasmota_ws_remove_discovered_device( await hass.async_block_till_done() # Verify device entry is created - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is not None @@ -170,7 +165,7 @@ async def test_tasmota_ws_remove_discovered_device( ) # Verify device entry is cleared - device_entry = device_reg.async_get_device( + device_entry = device_registry.async_get_device( connections={(dr.CONNECTION_NETWORK_MAC, mac)} ) assert device_entry is None diff --git a/tests/components/technove/conftest.py b/tests/components/technove/conftest.py index be34ebfefa5..a81575f1edf 100644 --- a/tests/components/technove/conftest.py +++ b/tests/components/technove/conftest.py @@ -1,10 +1,10 @@ """Fixtures for TechnoVE integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest from technove import Station as TechnoVEStation -from typing_extensions import Generator from homeassistant.components.technove.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/technove/fixtures/station_charging.json b/tests/components/technove/fixtures/station_charging.json index ea98dc0b071..63e68d0db0e 100644 --- a/tests/components/technove/fixtures/station_charging.json +++ b/tests/components/technove/fixtures/station_charging.json @@ -11,7 +11,7 @@ 
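The conftest change above swaps typing_extensions.Generator for the standard-library collections.abc.Generator as the fixture return annotation. A minimal sketch of a fixture annotated that way — the patched target some_library.Client is hypothetical, not the TechnoVE library's real path:

from collections.abc import Generator
from unittest.mock import MagicMock, patch

import pytest


@pytest.fixture
def mock_client() -> Generator[MagicMock, None, None]:
    """Yield a mocked client, undoing the patch when the test finishes."""
    with patch("some_library.Client", autospec=True) as client_class:
        yield client_class.return_value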
"normalPeriodActive": false, "maxChargePourcentage": 0.9, "isBatteryProtected": false, - "inSharingMode": true, + "inSharingMode": false, "energySession": 12.34, "energyTotal": 1234, "version": "1.82", diff --git a/tests/components/technove/snapshots/test_binary_sensor.ambr b/tests/components/technove/snapshots/test_binary_sensor.ambr index 140526b9391..cc2dcf4a04a 100644 --- a/tests/components/technove/snapshots/test_binary_sensor.ambr +++ b/tests/components/technove/snapshots/test_binary_sensor.ambr @@ -181,7 +181,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'on', + 'state': 'off', }) # --- # name: test_sensors[binary_sensor.technove_station_static_ip-entry] diff --git a/tests/components/technove/snapshots/test_diagnostics.ambr b/tests/components/technove/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..2e81f124ba5 --- /dev/null +++ b/tests/components/technove/snapshots/test_diagnostics.ambr @@ -0,0 +1,36 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'auto_charge': True, + 'conflict_in_sharing_config': False, + 'current': 23.75, + 'energy_session': 12.34, + 'energy_total': 1234, + 'high_charge_period_active': False, + 'in_sharing_mode': False, + 'is_battery_protected': False, + 'is_session_active': True, + 'is_static_ip': False, + 'is_up_to_date': True, + 'last_charge': ''' + 1701072080,0,17.39 + + ''', + 'mac_address': '**REDACTED**', + 'max_charge_percentage': 0.9, + 'max_current': 24, + 'max_station_current': 32, + 'name': 'TechnoVE Station', + 'network_ssid': 'Connecting...', + 'normal_period_active': False, + 'rssi': -82, + 'status': dict({ + '__type': "", + 'repr': "", + }), + 'time': 1701000000, + 'version': '1.82', + 'voltage_in': 238, + 'voltage_out': 238, + }) +# --- diff --git a/tests/components/technove/snapshots/test_number.ambr b/tests/components/technove/snapshots/test_number.ambr new file mode 100644 index 00000000000..622c04d542a --- /dev/null +++ b/tests/components/technove/snapshots/test_number.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_numbers[number.technove_station_maximum_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 32, + 'min': 8, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.technove_station_maximum_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maximum current', + 'platform': 'technove', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_current', + 'unique_id': 'AA:AA:AA:AA:AA:BB_max_current', + 'unit_of_measurement': None, + }) +# --- +# name: test_numbers[number.technove_station_maximum_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'TechnoVE Station Maximum current', + 'max': 32, + 'min': 8, + 'mode': , + 'step': 1, + }), + 'context': , + 'entity_id': 'number.technove_station_maximum_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '24', + }) +# --- diff --git a/tests/components/technove/test_diagnostics.py b/tests/components/technove/test_diagnostics.py new file mode 100644 index 00000000000..878b084c0c3 --- /dev/null +++ 
b/tests/components/technove/test_diagnostics.py @@ -0,0 +1,22 @@ +"""Tests for TechnoVE diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + init_integration: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, init_integration) + == snapshot + ) diff --git a/tests/components/technove/test_number.py b/tests/components/technove/test_number.py new file mode 100644 index 00000000000..c9f39cd9200 --- /dev/null +++ b/tests/components/technove/test_number.py @@ -0,0 +1,201 @@ +"""Tests for the TechnoVE number platform.""" + +from unittest.mock import MagicMock + +import pytest +from syrupy.assertion import SnapshotAssertion +from technove import TechnoVEConnectionError, TechnoVEError + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . import setup_with_selected_platforms + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "mock_technove") +async def test_numbers( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the creation and values of the TechnoVE numbers.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.NUMBER]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +@pytest.mark.parametrize( + ("entity_id", "method", "called_with_value"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + {"max_current": 10}, + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_number_expected_value( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, + called_with_value: dict[str, bool | int], +) -> None: + """Test set value services with valid values.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: called_with_value["max_current"]}, + blocking=True, + ) + + assert method_mock.call_count == 1 + method_mock.assert_called_with(**called_with_value) + + +@pytest.mark.parametrize( + ("entity_id", "value"), + [ + ( + "number.technove_station_maximum_current", + 1, + ), + ( + "number.technove_station_maximum_current", + 1000, + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_number_out_of_bound( + hass: HomeAssistant, + entity_id: str, + value: float, +) -> None: + """Test set value services with out of bound values.""" + state = hass.states.get(entity_id) + + with pytest.raises(ServiceValidationError, match="is outside valid range"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: value}, + 
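test_number_out_of_bound above combines pytest.mark.parametrize with pytest.raises(..., match=...) to cover several invalid inputs in one test body. Reduced to plain pytest, with RangeError and set_max_current as stand-ins for the integration's error handling rather than its actual API:

import pytest


class RangeError(Exception):
    """Stand-in for the validation error surfaced by the number entity."""


def set_max_current(value: int) -> None:
    if not 8 <= value <= 32:
        raise RangeError(f"Value {value} is outside valid range 8-32")


@pytest.mark.parametrize("value", [1, 1000])
def test_out_of_bound(value: int) -> None:
    with pytest.raises(RangeError, match="is outside valid range"):
        set_max_current(value)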
blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.usefixtures("init_integration") +async def test_set_max_current_sharing_mode( + hass: HomeAssistant, + mock_technove: MagicMock, +) -> None: + """Test failure to set the max current when the station is in sharing mode.""" + entity_id = "number.technove_station_maximum_current" + state = hass.states.get(entity_id) + + # Enable power sharing mode + device = mock_technove.update.return_value + device.info.in_sharing_mode = True + + with pytest.raises( + ServiceValidationError, + match="power sharing mode is enabled", + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 10, + }, + blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_invalid_response( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, +) -> None: + """Test invalid response, not becoming unavailable.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + method_mock.side_effect = TechnoVEError + with pytest.raises(HomeAssistantError, match="Invalid response from TechnoVE API"): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, + blocking=True, + ) + + assert method_mock.call_count == 1 + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ( + "number.technove_station_maximum_current", + "set_max_current", + ), + ], +) +@pytest.mark.usefixtures("init_integration") +async def test_connection_error( + hass: HomeAssistant, + mock_technove: MagicMock, + entity_id: str, + method: str, +) -> None: + """Test connection error, leading to becoming unavailable.""" + state = hass.states.get(entity_id) + method_mock = getattr(mock_technove, method) + + method_mock.side_effect = TechnoVEConnectionError + with pytest.raises( + HomeAssistantError, match="Error communicating with TechnoVE API" + ): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: state.entity_id, ATTR_VALUE: 10}, + blocking=True, + ) + + assert method_mock.call_count == 1 + assert (state := hass.states.get(state.entity_id)) + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/tedee/conftest.py b/tests/components/tedee/conftest.py index 295e34fd541..68444de640c 100644 --- a/tests/components/tedee/conftest.py +++ b/tests/components/tedee/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator import json from unittest.mock import AsyncMock, MagicMock, patch from pytedee_async.bridge import TedeeBridge from pytedee_async.lock import TedeeLock import pytest -from typing_extensions import Generator from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID diff --git a/tests/components/tedee/snapshots/test_binary_sensor.ambr b/tests/components/tedee/snapshots/test_binary_sensor.ambr index 8c9dca1bd12..385e4ac9bc1 100644 --- a/tests/components/tedee/snapshots/test_binary_sensor.ambr +++ 
b/tests/components/tedee/snapshots/test_binary_sensor.ambr @@ -32,6 +32,39 @@ 'unit_of_measurement': None, }) # --- +# name: test_binary_sensors[entry-lock_uncalibrated] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lock uncalibrated', + 'platform': 'tedee', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uncalibrated', + 'unique_id': '12345-uncalibrated', + 'unit_of_measurement': None, + }) +# --- # name: test_binary_sensors[entry-pullspring_enabled] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -112,6 +145,20 @@ 'state': 'off', }) # --- +# name: test_binary_sensors[state-lock_uncalibrated] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Lock-1A2B Lock uncalibrated', + }), + 'context': , + 'entity_id': 'binary_sensor.lock_1a2b_lock_uncalibrated', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensors[state-pullspring_enabled] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/tedee/snapshots/test_init.ambr b/tests/components/tedee/snapshots/test_init.ambr index c91fb3ca484..20d6bfcdc2a 100644 --- a/tests/components/tedee/snapshots/test_init.ambr +++ b/tests/components/tedee/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Bridge', + 'model_id': None, 'name': 'Bridge-AB1C', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tedee/snapshots/test_lock.ambr b/tests/components/tedee/snapshots/test_lock.ambr index 8fa8ab7668d..14913e32ba5 100644 --- a/tests/components/tedee/snapshots/test_lock.ambr +++ b/tests/components/tedee/snapshots/test_lock.ambr @@ -68,6 +68,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee PRO', + 'model_id': None, 'name': 'Lock-1A2B', 'name_by_user': None, 'primary_config_entry': , @@ -146,6 +147,7 @@ }), 'manufacturer': 'Tedee', 'model': 'Tedee GO', + 'model_id': None, 'name': 'Lock-2C3D', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tedee/test_binary_sensor.py b/tests/components/tedee/test_binary_sensor.py index ee8c318d2dd..788d31c84d2 100644 --- a/tests/components/tedee/test_binary_sensor.py +++ b/tests/components/tedee/test_binary_sensor.py @@ -15,20 +15,17 @@ from tests.common import async_fire_time_changed pytestmark = pytest.mark.usefixtures("init_integration") -BINARY_SENSORS = ( - "charging", - "semi_locked", - "pullspring_enabled", -) +BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, ) -> None: - """Test tedee battery charging sensor.""" + """Test tedee binary sensor.""" for key in BINARY_SENSORS: state = hass.states.get(f"binary_sensor.lock_1a2b_{key}") assert state @@ -39,6 +36,7 @@ async def test_binary_sensors( assert entry == snapshot(name=f"entry-{key}") 
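test_binary_sensors above loops over BINARY_SENSORS and takes one named snapshot per key, so each entity's registry entry and state land in separate sections of the same .ambr file. The shape of that pattern, with a dummy payload standing in for real registry entries:

from syrupy.assertion import SnapshotAssertion

BINARY_SENSORS = ("charging", "semi_locked", "pullspring_enabled", "lock_uncalibrated")


def test_named_snapshots(snapshot: SnapshotAssertion) -> None:
    for key in BINARY_SENSORS:
        payload = {"entity_id": f"binary_sensor.lock_1a2b_{key}"}
        # Each call stores/compares a separately named snapshot inside the same .ambr file.
        assert payload == snapshot(name=f"state-{key}")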
+@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_new_binary_sensors( hass: HomeAssistant, mock_tedee: MagicMock, diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index 588e63f693b..d5dc5d4efcf 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -10,7 +10,7 @@ from pytedee_async import ( import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -143,3 +143,44 @@ async def test_reauth_flow( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" + + +async def test_reconfigure_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_tedee: MagicMock +) -> None: + """Test that the reconfigure flow works.""" + + mock_config_entry.add_to_hass(hass) + + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data={ + CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, + CONF_HOST: "192.168.1.42", + }, + ) + + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "reconfigure_confirm" + + result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + {CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, CONF_HOST: "192.168.1.43"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(mock_config_entry.entry_id) + assert entry + assert entry.title == "My Tedee" + assert entry.data == { + CONF_HOST: "192.168.1.43", + CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, + CONF_WEBHOOK_ID: WEBHOOK_ID, + } diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index ffc4a8c30d6..741bc3156cb 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -25,7 +25,7 @@ from homeassistant.components.lock import ( STATE_UNLOCKING, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -276,10 +276,21 @@ async def test_new_lock( assert state +@pytest.mark.parametrize( + ("lib_state", "expected_state"), + [ + (TedeeLockState.LOCKED, STATE_LOCKED), + (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), + (TedeeLockState.UNKNOWN, STATE_UNKNOWN), + (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), + ], +) async def test_webhook_update( hass: HomeAssistant, mock_tedee: MagicMock, hass_client_no_auth: ClientSessionGenerator, + lib_state: TedeeLockState, + expected_state: str, ) -> None: """Test updated data set through webhook.""" @@ -287,10 +298,9 @@ async def test_webhook_update( assert state assert state.state == STATE_UNLOCKED - webhook_data = {"dummystate": 6} - 
mock_tedee.locks_dict[ - 12345 - ].state = TedeeLockState.LOCKED # is updated in the lib, so mock and assert in L296 + webhook_data = {"dummystate": lib_state.value} + # is updated in the lib, so mock and assert below + mock_tedee.locks_dict[12345].state = lib_state client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -302,4 +312,4 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_LOCKED + assert state.state == expected_state diff --git a/tests/components/template/snapshots/test_button.ambr b/tests/components/template/snapshots/test_button.ambr new file mode 100644 index 00000000000..3d96ad66050 --- /dev/null +++ b/tests/components/template/snapshots/test_button.ambr @@ -0,0 +1,28 @@ +# serializer version: 1 +# name: test_setup_config_entry[config_entry_extra_options0] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'button.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_config_entry[config_entry_extra_options1] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'button.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr new file mode 100644 index 00000000000..d4cabb2900f --- /dev/null +++ b/tests/components/template/snapshots/test_select.ambr @@ -0,0 +1,19 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + 'options': Wrapper([ + 'off', + 'on', + 'auto', + ]), + }), + 'context': , + 'entity_id': 'select.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- \ No newline at end of file diff --git a/tests/components/template/snapshots/test_switch.ambr b/tests/components/template/snapshots/test_switch.ambr new file mode 100644 index 00000000000..c240a9436a0 --- /dev/null +++ b/tests/components/template/snapshots/test_switch.ambr @@ -0,0 +1,14 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + }), + 'context': , + 'entity_id': 'switch.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index 50cad5be9e1..eb51b3f53b4 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -1,5 +1,6 @@ """The tests for the Template Binary sensor platform.""" +from copy import deepcopy from datetime import UTC, datetime, timedelta import logging from unittest.mock import patch @@ -995,20 +996,32 @@ async def test_availability_icon_picture( ], ) @pytest.mark.parametrize( - ("extra_config", "restored_state", "initial_state"), + ("extra_config", "source_state", "restored_state", "initial_state"), [ - ({}, ON, OFF), - ({}, OFF, OFF), - ({}, STATE_UNAVAILABLE, OFF), - ({}, STATE_UNKNOWN, OFF), - ({"delay_off": 5}, ON, ON), - ({"delay_off": 5}, OFF, OFF), - ({"delay_off": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, 
STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, ON, ON), - ({"delay_on": 5}, OFF, OFF), - ({"delay_on": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_on": 5}, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, OFF, ON, OFF), + ({}, OFF, OFF, OFF), + ({}, OFF, STATE_UNAVAILABLE, OFF), + ({}, OFF, STATE_UNKNOWN, OFF), + ({"delay_off": 5}, OFF, ON, ON), + ({"delay_off": 5}, OFF, OFF, OFF), + ({"delay_off": 5}, OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, OFF, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, OFF, ON, OFF), + ({"delay_on": 5}, OFF, OFF, OFF), + ({"delay_on": 5}, OFF, STATE_UNAVAILABLE, OFF), + ({"delay_on": 5}, OFF, STATE_UNKNOWN, OFF), + ({}, ON, ON, ON), + ({}, ON, OFF, ON), + ({}, ON, STATE_UNAVAILABLE, ON), + ({}, ON, STATE_UNKNOWN, ON), + ({"delay_off": 5}, ON, ON, ON), + ({"delay_off": 5}, ON, OFF, ON), + ({"delay_off": 5}, ON, STATE_UNAVAILABLE, ON), + ({"delay_off": 5}, ON, STATE_UNKNOWN, ON), + ({"delay_on": 5}, ON, ON, ON), + ({"delay_on": 5}, ON, OFF, OFF), + ({"delay_on": 5}, ON, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, ON, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1017,18 +1030,20 @@ async def test_restore_state( domain, config, extra_config, + source_state, restored_state, initial_state, ) -> None: """Test restoring template binary sensor.""" + hass.states.async_set("sensor.test_state", source_state) fake_state = State( "binary_sensor.test", restored_state, {}, ) mock_restore_cache(hass, (fake_state,)) - config = dict(config) + config = deepcopy(config) config["template"]["binary_sensor"].update(**extra_config) with assert_setup_component(count, domain): assert await async_setup_component( diff --git a/tests/components/template/test_button.py b/tests/components/template/test_button.py index c861c7874d4..72c3d2351f5 100644 --- a/tests/components/template/test_button.py +++ b/tests/components/template/test_button.py @@ -3,9 +3,12 @@ import datetime as dt from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.template import DOMAIN from homeassistant.components.template.button import DEFAULT_NAME from homeassistant.const import ( CONF_DEVICE_CLASS, @@ -15,14 +18,58 @@ from homeassistant.const import ( STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component +from tests.common import MockConfigEntry, assert_setup_component _TEST_BUTTON = "button.template_button" _TEST_OPTIONS_BUTTON = "button.test" +@pytest.mark.parametrize( + "config_entry_extra_options", + [ + {}, + { + "device_class": "update", + }, + ], +) +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + config_entry_extra_options: dict[str, str], +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "button", + "press": [ + { + "service": "input_boolean.toggle", + "metadata": {}, + "data": {}, + "target": {"entity_id": "input_boolean.test"}, + } + ], + } + | config_entry_extra_options, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await 
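The restore-state test above switches from dict(config) to deepcopy(config) before merging extra_config into the nested template config; a shallow copy shares the inner dicts, so one parametrized case could leak its delay_on/delay_off keys into the next. The difference in isolation:

from copy import deepcopy

base = {"template": {"binary_sensor": {"state": "{{ False }}"}}}

shallow = dict(base)
shallow["template"]["binary_sensor"]["delay_on"] = 5
assert "delay_on" in base["template"]["binary_sensor"]  # nested dict is shared, so the original mutated

base = {"template": {"binary_sensor": {"state": "{{ False }}"}}}
deep = deepcopy(base)
deep["template"]["binary_sensor"]["delay_on"] = 5
assert "delay_on" not in base["template"]["binary_sensor"]  # original stays untouched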
hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("button.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -197,3 +244,49 @@ def _verify( state = hass.states.get(entity_id) assert state.state == expected_value assert state.attributes == attributes + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for button template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "button", + "device_id": device_entry.id, + "press": [ + { + "service": "input_boolean.toggle", + "metadata": {}, + "data": {}, + "target": {"entity_id": "input_boolean.test"}, + } + ], + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("button.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index f277b918661..ff5db52d667 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -31,7 +31,9 @@ from tests.typing import WebSocketGenerator [ ( "binary_sensor", - "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", + { + "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" + }, "on", {"one": "on", "two": "off"}, {}, @@ -41,7 +43,9 @@ from tests.typing import WebSocketGenerator ), ( "sensor", - "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", + { + "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" + }, "50.0", {"one": "30.0", "two": "20.0"}, {}, @@ -49,8 +53,67 @@ from tests.typing import WebSocketGenerator {}, {}, ), + ( + "button", + {}, + "unknown", + {"one": "30.0", "two": "20.0"}, + {}, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + {}, + ), + ( + "image", + {"url": "{{ states('sensor.one') }}"}, + "2024-07-09T00:00:00+00:00", + {"one": "http://www.test.com", "two": ""}, + {}, + {"verify_ssl": True}, + {"verify_ssl": True}, + {}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + "on", + {"one": "on", "two": "off"}, + {}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {}, + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + 
"on", + {"one": "on", "two": "off"}, + {}, + {}, + {}, + {}, + ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_config_flow( hass: HomeAssistant, template_type, @@ -91,7 +154,7 @@ async def test_config_flow( result["flow_id"], { "name": "My template", - "state": state_template, + **state_template, **extra_input, }, ) @@ -102,8 +165,8 @@ async def test_config_flow( assert result["data"] == {} assert result["options"] == { "name": "My template", - "state": state_template, "template_type": template_type, + **state_template, **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -112,8 +175,8 @@ async def test_config_flow( assert config_entry.data == {} assert config_entry.options == { "name": "My template", - "state": state_template, "template_type": template_type, + **state_template, **extra_options, } @@ -127,22 +190,56 @@ async def test_config_flow( ( "template_type", "state_template", + "extra_input", + "extra_options", ), [ ( "sensor", - "{{ 15 }}", + {"state": "{{ 15 }}"}, + {}, + {}, ), ( "binary_sensor", - "{{ false }}", + {"state": "{{ false }}"}, + {}, + {}, + ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, + ), + ( + "button", + {}, + {}, + {}, + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + {"verify_ssl": True}, + {"verify_ssl": True}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, ), ], ) async def test_config_flow_device( hass: HomeAssistant, template_type: str, - state_template: str, + state_template: dict[str, Any], + extra_input: dict[str, Any], + extra_options: dict[str, Any], device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -180,8 +277,9 @@ async def test_config_flow_device( result["flow_id"], { "name": "My template", - "state": state_template, "device_id": device_id, + **state_template, + **extra_input, }, ) await hass.async_block_till_done() @@ -191,9 +289,10 @@ async def test_config_flow_device( assert result["data"] == {} assert result["options"] == { "name": "My template", - "state": state_template, "template_type": template_type, "device_id": device_id, + **state_template, + **extra_options, } assert len(mock_setup_entry.mock_calls) == 1 @@ -201,9 +300,10 @@ async def test_config_flow_device( assert config_entry.data == {} assert config_entry.options == { "name": "My template", - "state": state_template, "template_type": template_type, "device_id": device_id, + **state_template, + **extra_options, } @@ -214,8 +314,8 @@ def get_suggested(schema, key): if k.description is None or "suggested_value" not in k.description: return None return k.description["suggested_value"] - # Wanted key absent from schema - raise KeyError("Wanted key absent from schema") + # If the desired key is missing from the schema, return None + return None @pytest.mark.parametrize( @@ -227,28 +327,104 @@ def get_suggested(schema, key): "input_states", "extra_options", "options_options", + "key_template", ), [ ( "binary_sensor", - "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}", - "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}", + { + "state": "{{ states('binary_sensor.one') == 'on' or states('binary_sensor.two') == 'on' }}" + }, + { + "state": "{{ states('binary_sensor.one') == 'on' and states('binary_sensor.two') == 'on' }}" + }, ["on", "off"], {"one": 
"on", "two": "off"}, {}, {}, + "state", ), ( "sensor", - "{{ float(states('sensor.one')) + float(states('sensor.two')) }}", - "{{ float(states('sensor.one')) - float(states('sensor.two')) }}", + { + "state": "{{ float(states('sensor.one')) + float(states('sensor.two')) }}" + }, + { + "state": "{{ float(states('sensor.one')) - float(states('sensor.two')) }}" + }, ["50.0", "10.0"], {"one": "30.0", "two": "20.0"}, {}, {}, + "state", + ), + ( + "button", + {}, + {}, + ["unknown", "unknown"], + {"one": "30.0", "two": "20.0"}, + { + "device_class": "restart", + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + { + "press": [ + { + "service": "input_boolean.toggle", + "target": {"entity_id": "input_boolean.test"}, + "data": {}, + } + ], + }, + "state", + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + }, + { + "url": "{{ states('sensor.two') }}", + }, + ["2024-07-09T00:00:00+00:00", "2024-07-09T00:00:00+00:00"], + {"one": "http://www.test.com", "two": "http://www.test2.com"}, + {"verify_ssl": True}, + { + "url": "{{ states('sensor.two') }}", + "verify_ssl": True, + }, + "url", + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"state": "{{ states('select.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + "state", + ), + ( + "switch", + {"value_template": "{{ states('switch.one') }}"}, + {"value_template": "{{ states('switch.two') }}"}, + ["on", "off"], + {"one": "on", "two": "off"}, + {}, + {}, + "value_template", ), ], ) +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_options( hass: HomeAssistant, template_type, @@ -258,6 +434,7 @@ async def test_options( input_states, extra_options, options_options, + key_template, ) -> None: """Test reconfiguring.""" input_entities = ["one", "two"] @@ -272,8 +449,8 @@ async def test_options( domain=DOMAIN, options={ "name": "My template", - "state": old_state_template, "template_type": template_type, + **old_state_template, **extra_options, }, title="My template", @@ -291,25 +468,30 @@ async def test_options( result = await hass.config_entries.options.async_init(config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert result["step_id"] == template_type - assert get_suggested(result["data_schema"].schema, "state") == old_state_template + assert get_suggested( + result["data_schema"].schema, key_template + ) == old_state_template.get(key_template) assert "name" not in result["data_schema"].schema result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={"state": new_state_template, **options_options}, + user_input={ + **new_state_template, + **options_options, + }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "name": "My template", - "state": new_state_template, "template_type": template_type, + **new_state_template, **extra_options, } assert config_entry.data == {} assert config_entry.options == { "name": "My template", - "state": new_state_template, "template_type": template_type, + **new_state_template, **extra_options, } assert config_entry.title == "My template" @@ -334,7 +516,7 @@ async def test_options( assert result["step_id"] == template_type assert get_suggested(result["data_schema"].schema, "name") is None - assert get_suggested(result["data_schema"].schema, "state") is None + assert get_suggested(result["data_schema"].schema, 
key_template) is None @pytest.mark.parametrize( @@ -943,22 +1125,57 @@ async def test_option_flow_sensor_preview_config_entry_removed( ( "template_type", "state_template", + "extra_input", + "extra_options", ), [ ( "sensor", - "{{ 15 }}", + {"state": "{{ 15 }}"}, + {}, + {}, ), ( "binary_sensor", - "{{ false }}", + {"state": "{{ false }}"}, + {}, + {}, + ), + ( + "button", + {}, + {}, + {}, + ), + ( + "image", + { + "url": "{{ states('sensor.one') }}", + "verify_ssl": True, + }, + {}, + {}, + ), + ( + "select", + {"state": "{{ states('select.one') }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + {"options": "{{ ['off', 'on', 'auto'] }}"}, + ), + ( + "switch", + {"value_template": "{{ false }}"}, + {}, + {}, ), ], ) async def test_options_flow_change_device( hass: HomeAssistant, template_type: str, - state_template: str, + state_template: dict[str, Any], + extra_input: dict[str, Any], + extra_options: dict[str, Any], device_registry: dr.DeviceRegistry, ) -> None: """Test remove the device registry configuration entry when the device changes.""" @@ -992,11 +1209,12 @@ async def test_options_flow_change_device( domain=DOMAIN, options={ "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_options, }, - title="Sensor template", + title="Template", ) template_config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(template_config_entry.entry_id) @@ -1011,23 +1229,26 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, "device_id": device_id2, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id2, + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id2, + **state_template, + **extra_options, } # Remove link with device @@ -1039,20 +1260,23 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", + **state_template, + **extra_options, } # Change to link to device 1 @@ -1064,21 +1288,24 @@ async def test_options_flow_change_device( result = await hass.config_entries.options.async_configure( result["flow_id"], user_input={ - "state": state_template, "device_id": device_id1, + **state_template, + **extra_input, }, ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { "template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_input, } assert template_config_entry.data == {} assert template_config_entry.options == { 
"template_type": template_type, - "name": "Test", - "state": state_template, + "name": "My template", "device_id": device_id1, + **state_template, + **extra_options, } diff --git a/tests/components/template/test_image.py b/tests/components/template/test_image.py index bda9e2530ca..101b475956a 100644 --- a/tests/components/template/test_image.py +++ b/tests/components/template/test_image.py @@ -8,6 +8,7 @@ import httpx from PIL import Image import pytest import respx +from syrupy.assertion import SnapshotAssertion from homeassistant import setup from homeassistant.components.input_text import ( @@ -15,12 +16,13 @@ from homeassistant.components.input_text import ( DOMAIN as INPUT_TEXT_DOMAIN, SERVICE_SET_VALUE as INPUT_TEXT_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ENTITY_PICTURE, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util -from tests.common import assert_setup_component +from tests.common import MockConfigEntry, assert_setup_component from tests.typing import ClientSessionGenerator _DEFAULT = object() @@ -74,6 +76,39 @@ async def _assert_state( assert body == expected_image +@respx.mock +@pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + imgbytes_jpg, +) -> None: + """Test the config flow.""" + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("image.my_template") + assert state is not None + assert state.state == "2024-07-09T00:00:00+00:00" + + @respx.mock @pytest.mark.freeze_time("2023-04-01 00:00:00+00:00") async def test_platform_config( @@ -503,3 +538,47 @@ async def test_trigger_image_custom_entity_picture( imgbytes_jpg, expected_entity_picture="http://example2.com", ) + + +@respx.mock +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for image template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + respx.get("http://example.com").respond( + stream=imgbytes_jpg, content_type="image/jpeg" + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "image", + "url": "http://example.com", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = 
entity_registry.async_get("image.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index d13fd9035b0..fe08e1f4963 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -1,4 +1,4 @@ -"""The test for the Template sensor platform.""" +"""Test for Template helper.""" from datetime import timedelta from unittest.mock import patch @@ -7,9 +7,9 @@ import pytest from homeassistant import config from homeassistant.components.template import DOMAIN +from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.helpers.reload import SERVICE_RELOAD from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -271,13 +271,87 @@ async def async_yaml_patch_helper(hass, filename): await hass.async_block_till_done() +@pytest.mark.parametrize( + ( + "config_entry_options", + "config_user_input", + ), + [ + ( + { + "name": "My template", + "state": "{{10}}", + "template_type": "sensor", + }, + { + "state": "{{12}}", + }, + ), + ( + { + "template_type": "binary_sensor", + "name": "My template", + "state": "{{1 == 1}}", + }, + { + "state": "{{1 == 2}}", + }, + ), + ( + { + "template_type": "image", + "name": "My template", + "url": "http://example.com", + }, + { + "url": "http://example.com", + }, + ), + ( + { + "template_type": "button", + "name": "My template", + }, + {}, + ), + ( + { + "template_type": "select", + "name": "My template", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + { + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + ), + ( + { + "template_type": "switch", + "name": "My template", + "value_template": "{{ true }}", + }, + { + "value_template": "{{ true }}", + }, + ), + ], +) async def test_change_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + config_entry_options: dict[str, str], + config_user_input: dict[str, str], ) -> None: - """Test remove the device registry configuration entry when the device changes.""" + """Test the link between the device and the config entry. - # Configure a device registry + Test, for each platform, that the device was linked to the + config entry and the link was removed when the device is + changed in the integration options. 
+ """ + + # Configure devices registry entry_device1 = MockConfigEntry() entry_device1.add_to_hass(hass) device1 = device_registry.async_get_or_create( @@ -300,60 +374,57 @@ async def test_change_device( device_id2 = device2.id assert device_id2 is not None - # Setup the config entry (binary_sensor) - sensor_config_entry = MockConfigEntry( + # Setup the config entry + template_config_entry = MockConfigEntry( data={}, domain=DOMAIN, - options={ - "template_type": "binary_sensor", - "name": "Teste", - "state": "{{15}}", - "device_id": device_id1, - }, - title="Binary sensor template", + options=config_entry_options | {"device_id": device_id1}, + title="Template", ) - sensor_config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(sensor_config_entry.entry_id) + template_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(template_config_entry.entry_id) await hass.async_block_till_done() - # Confirm that the configuration entry has been added to the device 1 registry (current) + # Confirm that the config entry has been added to the device 1 registry (current) current_device = device_registry.async_get(device_id=device_id1) - assert sensor_config_entry.entry_id in current_device.config_entries + assert template_config_entry.entry_id in current_device.config_entries - # Change configuration options to use device 2 and reload the integration - result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) + # Change config options to use device 2 and reload the integration + result = await hass.config_entries.options.async_init( + template_config_entry.entry_id + ) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={ - "state": "{{15}}", - "device_id": device_id2, - }, + user_input=config_user_input | {"device_id": device_id2}, ) await hass.async_block_till_done() - # Confirm that the configuration entry has been removed from the device 1 registry (previous) + # Confirm that the config entry has been removed from the device 1 registry previous_device = device_registry.async_get(device_id=device_id1) - assert sensor_config_entry.entry_id not in previous_device.config_entries + assert template_config_entry.entry_id not in previous_device.config_entries - # Confirm that the configuration entry has been added to the device 2 registry (current) + # Confirm that the config entry has been added to the device 2 registry (current) current_device = device_registry.async_get(device_id=device_id2) - assert sensor_config_entry.entry_id in current_device.config_entries + assert template_config_entry.entry_id in current_device.config_entries - result = await hass.config_entries.options.async_init(sensor_config_entry.entry_id) + # Change the config options to remove the device and reload the integration + result = await hass.config_entries.options.async_init( + template_config_entry.entry_id + ) result = await hass.config_entries.options.async_configure( result["flow_id"], - user_input={ - "state": "{{15}}", - }, + user_input=config_user_input, ) await hass.async_block_till_done() - # Confirm that the configuration entry has been removed from the device 2 registry (previous) + # Confirm that the config entry has been removed from the device 2 registry previous_device = device_registry.async_get(device_id=device_id2) - assert sensor_config_entry.entry_id not in previous_device.config_entries + assert template_config_entry.entry_id not in previous_device.config_entries - # Confirm that there is no device 
with the helper configuration entry + # Confirm that there is no device with the helper config entry assert ( - dr.async_entries_for_config_entry(device_registry, sensor_config_entry.entry_id) + dr.async_entries_for_config_entry( + device_registry, template_config_entry.entry_id + ) == [] ) diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 4106abdd469..2268c0840aa 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -1,5 +1,7 @@ """The tests for the Template select platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_select import ( ATTR_OPTION as INPUT_SELECT_ATTR_OPTION, @@ -14,17 +16,45 @@ from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION as SELECT_SERVICE_SELECT_OPTION, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_SELECT = "select.template_select" # Represent for select's current_option _OPTION_INPUT_SELECT = "input_select.option" +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("select.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -428,3 +458,43 @@ async def test_template_icon_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_SELECT) assert state.state == "a" assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for select template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "select", + "state": "{{ 'on' }}", + "options": "{{ ['off', 'on', 'auto'] }}", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + 
template_entity = entity_registry.async_get("select.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_switch.py b/tests/components/template/test_switch.py index 68cca990ef1..2fc0f29acaf 100644 --- a/tests/components/template/test_switch.py +++ b/tests/components/template/test_switch.py @@ -1,8 +1,10 @@ """The tests for the Template switch platform.""" import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant import setup +from homeassistant.components import template from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -13,9 +15,15 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import CoreState, HomeAssistant, ServiceCall, State +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, mock_component, mock_restore_cache +from tests.common import ( + MockConfigEntry, + assert_setup_component, + mock_component, + mock_restore_cache, +) OPTIMISTIC_SWITCH_CONFIG = { "turn_on": { @@ -35,6 +43,38 @@ OPTIMISTIC_SWITCH_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + hass.states.async_set( + "switch.one", + "on", + {}, + ) + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "{{ states('switch.one') }}", + "template_type": SWITCH_DOMAIN, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("switch.my_template") + assert state is not None + assert state == snapshot + + async def test_template_state_text(hass: HomeAssistant) -> None: """Test the state text of a template.""" with assert_setup_component(1, "switch"): @@ -655,3 +695,42 @@ async def test_unique_id(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(hass.states.async_all("switch")) == 1 + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for Template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=template.DOMAIN, + options={ + "name": "My template", + "value_template": "{{ true }}", + "template_type": "switch", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("switch.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/tesla_fleet/__init__.py b/tests/components/tesla_fleet/__init__.py new file mode 100644 index 
00000000000..78159402bff
--- /dev/null
+++ b/tests/components/tesla_fleet/__init__.py
@@ -0,0 +1,74 @@
+"""Tests for the Tesla Fleet integration."""
+
+from unittest.mock import patch
+
+from syrupy import SnapshotAssertion
+
+from homeassistant.components.application_credentials import (
+    ClientCredential,
+    async_import_client_credential,
+)
+from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN
+from homeassistant.const import Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+from homeassistant.setup import async_setup_component
+
+from tests.common import MockConfigEntry
+
+
+async def setup_platform(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    platforms: list[Platform] | None = None,
+) -> None:
+    """Set up the Tesla Fleet platform."""
+
+    assert await async_setup_component(hass, "application_credentials", {})
+    await async_import_client_credential(
+        hass,
+        DOMAIN,
+        ClientCredential(CLIENT_ID, "", "Home Assistant"),
+        DOMAIN,
+    )
+
+    config_entry.add_to_hass(hass)
+
+    if platforms is None:
+        await hass.config_entries.async_setup(config_entry.entry_id)
+    else:
+        with patch("homeassistant.components.tesla_fleet.PLATFORMS", platforms):
+            await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+
+def assert_entities(
+    hass: HomeAssistant,
+    entry_id: str,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test that all entities match their snapshot."""
+
+    entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id)
+
+    assert entity_entries
+    for entity_entry in entity_entries:
+        assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry")
+        assert (state := hass.states.get(entity_entry.entity_id))
+        assert state == snapshot(name=f"{entity_entry.entity_id}-state")
+
+
+def assert_entities_alt(
+    hass: HomeAssistant,
+    entry_id: str,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test that all entities match their alt snapshot."""
+    entity_entries = er.async_entries_for_config_entry(entity_registry, entry_id)
+
+    assert entity_entries
+    for entity_entry in entity_entries:
+        assert (state := hass.states.get(entity_entry.entity_id))
+        assert state == snapshot(name=f"{entity_entry.entity_id}-statealt")
diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py
new file mode 100644
index 00000000000..7d60ae5e174
--- /dev/null
+++ b/tests/components/tesla_fleet/conftest.py
@@ -0,0 +1,124 @@
+"""Fixtures for Tesla Fleet."""
+
+from __future__ import annotations
+
+from collections.abc import Generator
+from copy import deepcopy
+import time
+from unittest.mock import AsyncMock, patch
+
+import jwt
+import pytest
+
+from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES
+
+from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE
+
+from tests.common import MockConfigEntry
+
+UID = "abc-123"
+
+
+@pytest.fixture(name="expires_at")
+def mock_expires_at() -> int:
+    """Fixture to set the OAuth token expiration time."""
+    return time.time() + 3600
+
+
+@pytest.fixture(name="scopes")
+def mock_scopes() -> list[str]:
+    """Fixture to set the scopes present in the OAuth token."""
+    return SCOPES
+
+
+@pytest.fixture
+def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
+    """Create Tesla Fleet entry in Home Assistant."""
+
+    access_token = jwt.encode(
+        {
+            "sub": UID,
+            "aud": [],
+            "scp": scopes,
+            "ou_code": "NA",
+        },
+        key="",
+        algorithm="none",
+    )
+
+    return MockConfigEntry(
+        domain=DOMAIN,
+        title=UID,
+        unique_id=UID,
+        data={
+            "auth_implementation": DOMAIN,
+            "token": {
+                "status": 0,
+                "userid": UID,
+                "access_token": access_token,
+                "refresh_token": "mock-refresh-token",
+                "expires_at": expires_at,
+                "scope": ",".join(scopes),
+            },
+        },
+    )
+
+
+@pytest.fixture(autouse=True)
+def mock_products() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API products method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.TeslaFleetApi.products",
+        return_value=PRODUCTS,
+    ) as mock_products:
+        yield mock_products
+
+
+@pytest.fixture(autouse=True)
+def mock_vehicle_state() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API Vehicle Specific vehicle method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle",
+        return_value=VEHICLE_ONLINE,
+    ) as mock_vehicle:
+        yield mock_vehicle
+
+
+@pytest.fixture(autouse=True)
+def mock_vehicle_data() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API Vehicle Specific vehicle_data method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.VehicleSpecific.vehicle_data",
+        return_value=VEHICLE_DATA,
+    ) as mock_vehicle_data:
+        yield mock_vehicle_data
+
+
+@pytest.fixture(autouse=True)
+def mock_wake_up() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API Vehicle Specific wake_up method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.VehicleSpecific.wake_up",
+        return_value=VEHICLE_ONLINE,
+    ) as mock_wake_up:
+        yield mock_wake_up
+
+
+@pytest.fixture(autouse=True)
+def mock_live_status() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API Energy Specific live_status method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.EnergySpecific.live_status",
+        side_effect=lambda: deepcopy(LIVE_STATUS),
+    ) as mock_live_status:
+        yield mock_live_status
+
+
+@pytest.fixture(autouse=True)
+def mock_site_info() -> Generator[AsyncMock]:
+    """Mock Tesla Fleet API Energy Specific site_info method."""
+    with patch(
+        "homeassistant.components.tesla_fleet.EnergySpecific.site_info",
+        side_effect=lambda: deepcopy(SITE_INFO),
+    ) as mock_site_info:
+        yield mock_site_info
diff --git a/tests/components/tesla_fleet/const.py b/tests/components/tesla_fleet/const.py
new file mode 100644
index 00000000000..76b4ae20092
--- /dev/null
+++ b/tests/components/tesla_fleet/const.py
@@ -0,0 +1,28 @@
+"""Constants for the Tesla Fleet tests."""
+
+from homeassistant.components.tesla_fleet.const import DOMAIN, TeslaFleetState
+
+from tests.common import load_json_object_fixture
+
+VEHICLE_ONLINE = {"response": {"state": TeslaFleetState.ONLINE}, "error": None}
+VEHICLE_ASLEEP = {"response": {"state": TeslaFleetState.ASLEEP}, "error": None}
+
+PRODUCTS = load_json_object_fixture("products.json", DOMAIN)
+VEHICLE_DATA = load_json_object_fixture("vehicle_data.json", DOMAIN)
+VEHICLE_DATA_ALT = load_json_object_fixture("vehicle_data_alt.json", DOMAIN)
+LIVE_STATUS = load_json_object_fixture("live_status.json", DOMAIN)
+SITE_INFO = load_json_object_fixture("site_info.json", DOMAIN)
+
+COMMAND_OK = {"response": {"result": True, "reason": ""}}
+COMMAND_REASON = {"response": {"result": False, "reason": "already closed"}}
+COMMAND_IGNORED_REASON = {"response": {"result": False, "reason": "already_set"}}
+COMMAND_NOREASON = {"response": {"result": False}}  # Unexpected
+COMMAND_ERROR = {
+    "response": None,
+    "error": "vehicle unavailable: vehicle is offline or asleep",
+    "error_description": "",
+}
+COMMAND_NOERROR = {"answer": 42} +COMMAND_ERRORS = (COMMAND_REASON, COMMAND_NOREASON, COMMAND_ERROR, COMMAND_NOERROR) + +RESPONSE_OK = {"response": {}, "error": None} diff --git a/tests/components/tesla_fleet/fixtures/live_status.json b/tests/components/tesla_fleet/fixtures/live_status.json new file mode 100644 index 00000000000..486f9f4fadd --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/live_status.json @@ -0,0 +1,33 @@ +{ + "response": { + "solar_power": 1185, + "energy_left": 38896.47368421053, + "total_pack_energy": 40727, + "percentage_charged": 95.50537403739663, + "backup_capable": true, + "battery_power": 5060, + "load_power": 6245, + "grid_status": "Active", + "grid_services_active": false, + "grid_power": 0, + "grid_services_power": 0, + "generator_power": 0, + "island_status": "on_grid", + "storm_mode_active": false, + "timestamp": "2024-01-01T00:00:00+00:00", + "wall_connectors": [ + { + "din": "abd-123", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 0 + }, + { + "din": "bcd-234", + "wall_connector_state": 2, + "wall_connector_fault_state": 2, + "wall_connector_power": 0 + } + ] + } +} diff --git a/tests/components/tesla_fleet/fixtures/products.json b/tests/components/tesla_fleet/fixtures/products.json new file mode 100644 index 00000000000..8da921a33f4 --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/products.json @@ -0,0 +1,131 @@ +{ + "response": [ + { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "display_name": "Test", + "option_codes": null, + "cached_data": null, + "granular_access": { "hide_private": false }, + "tokens": ["abc", "def"], + "state": "asleep", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": false, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": null, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705701487912, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "command_signing": "allowed", + "release_notes_supported": true + }, + { + "energy_site_id": 123456, + "resource_type": "battery", + "site_name": "Energy Site", + "id": "ABC123", + "gateway_id": "ABC123", + "asset_site_id": "c0ffee", + "warp_site_number": "GA123456", + 
"energy_left": 23286.105263157893, + "total_pack_energy": 40804, + "percentage_charged": 57.068192488868476, + "battery_type": "ac_powerwall", + "backup_capable": true, + "battery_power": 14990, + "go_off_grid_test_banner_enabled": null, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": null, + "vpp_tour_enabled": null, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": true, + "components": { + "battery": true, + "battery_type": "ac_powerwall", + "solar": true, + "solar_type": "pv_panel", + "grid": true, + "load_meter": true, + "market_type": "residential", + "wall_connectors": [ + { + "device_id": "abc-123", + "din": "123-abc", + "is_active": true + }, + { + "device_id": "bcd-234", + "din": "234-bcd", + "is_active": true + } + ] + }, + "features": { + "rate_plan_manager_no_pricing_constraint": true + } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } + } + ], + "count": 3 +} diff --git a/tests/components/tesla_fleet/fixtures/site_info.json b/tests/components/tesla_fleet/fixtures/site_info.json new file mode 100644 index 00000000000..60958bbabbb --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/site_info.json @@ -0,0 +1,127 @@ +{ + "response": { + "id": "1233-abcd", + "site_name": "Site", + "backup_reserve_percent": 0, + "default_real_mode": "self_consumption", + "installation_date": "2022-01-01T00:00:00+00:00", + "user_settings": { + "go_off_grid_test_banner_enabled": false, + "storm_mode_enabled": true, + "powerwall_onboarding_settings_set": true, + "powerwall_tesla_electric_interested_in": false, + "vpp_tour_enabled": true, + "sync_grid_alert_enabled": true, + "breaker_alert_enabled": false + }, + "components": { + "solar": true, + "solar_type": "pv_panel", + "battery": true, + "grid": true, + "backup": true, + "gateway": "teg", + "load_meter": true, + "tou_capable": true, + "storm_mode_capable": true, + "flex_energy_request_capable": false, + "car_charging_data_supported": false, + "off_grid_vehicle_charging_reserve_supported": true, + "vehicle_charging_performance_view_enabled": false, + "vehicle_charging_solar_offset_view_enabled": false, + "battery_solar_offset_view_enabled": true, + "solar_value_enabled": true, + "energy_value_header": "Energy Value", + "energy_value_subheader": "Estimated Value", + "energy_service_self_scheduling_enabled": true, + "show_grid_import_battery_source_cards": true, + "set_islanding_mode_enabled": true, + "wifi_commissioning_enabled": true, + "backup_time_remaining_enabled": true, + "battery_type": "ac_powerwall", + "configurable": true, + "grid_services_enabled": false, + "gateways": [ + { + "device_id": "gateway-id", + "din": "gateway-din", + "serial_number": "CN00000000J50D", + "part_number": "1152100-14-J", + "part_type": 10, + "part_name": "Tesla Backup Gateway 2", + "is_active": true, + "site_id": "1234-abcd", + "firmware_version": "24.4.0 0fe780c9", + "updated_datetime": "2024-05-14T00:00:00.000Z" + } + ], + "batteries": [ + { + "device_id": "battery-1-id", + "din": "battery-1-din", + "serial_number": "TG000000001DA5", + "part_number": "3012170-10-B", + "part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + }, + { + "device_id": "battery-2-id", + "din": "battery-2-din", + "serial_number": "TG000000002DA5", + "part_number": "3012170-05-C", + 
"part_type": 2, + "part_name": "Powerwall 2", + "nameplate_max_charge_power": 5000, + "nameplate_max_discharge_power": 5000, + "nameplate_energy": 13500 + } + ], + "wall_connectors": [ + { + "device_id": "123abc", + "din": "abd-123", + "part_name": "Gen 3 Wall Connector", + "is_active": true + }, + { + "device_id": "234bcd", + "din": "bcd-234", + "part_name": "Gen 3 Wall Connector", + "is_active": true + } + ], + "disallow_charge_from_grid_with_solar_installed": true, + "customer_preferred_export_rule": "pv_only", + "net_meter_mode": "battery_ok", + "system_alerts_enabled": true + }, + "version": "23.44.0 eb113390", + "battery_count": 2, + "tou_settings": { + "optimization_strategy": "economics", + "schedule": [ + { + "target": "off_peak", + "week_days": [1, 0], + "start_seconds": 0, + "end_seconds": 3600 + }, + { + "target": "peak", + "week_days": [1, 0], + "start_seconds": 3600, + "end_seconds": 0 + } + ] + }, + "nameplate_power": 15000, + "nameplate_energy": 40500, + "installation_time_zone": "", + "max_site_meter_power_ac": 1000000000, + "min_site_meter_power_ac": -1000000000, + "vpp_backup_reserve_percent": 0 + } +} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data.json b/tests/components/tesla_fleet/fixtures/vehicle_data.json new file mode 100644 index 00000000000..3845ae48559 --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/vehicle_data.json @@ -0,0 +1,282 @@ +{ + "response": { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "granular_access": { + "hide_private": false + }, + "tokens": ["abc", "def"], + "state": "online", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "charge_state": { + "battery_heater_on": false, + "battery_level": 77, + "battery_range": 266.87, + "charge_amps": 16, + "charge_current_request": 16, + "charge_current_request_max": 16, + "charge_enable_request": true, + "charge_energy_added": 0, + "charge_limit_soc": 80, + "charge_limit_soc_max": 100, + "charge_limit_soc_min": 50, + "charge_limit_soc_std": 80, + "charge_miles_added_ideal": 0, + "charge_miles_added_rated": 0, + "charge_port_cold_weather_mode": false, + "charge_port_color": "", + "charge_port_door_open": true, + "charge_port_latch": "Engaged", + "charge_rate": 0, + "charger_actual_current": 0, + "charger_phases": null, + "charger_pilot_current": 16, + "charger_power": 0, + "charger_voltage": 2, + "charging_state": "Stopped", + "conn_charge_cable": "IEC", + "est_battery_range": 275.04, + "fast_charger_brand": "", + "fast_charger_present": false, + "fast_charger_type": "ACSingleWireCAN", + "ideal_battery_range": 266.87, + "max_range_charge_counter": 0, + "minutes_to_full_charge": 0, + "not_enough_power_to_heat": null, + "off_peak_charging_enabled": false, + "off_peak_charging_times": "all_week", + "off_peak_hours_end_time": 900, + "preconditioning_enabled": false, + "preconditioning_times": "all_week", + "scheduled_charging_mode": "Off", + "scheduled_charging_pending": false, + "scheduled_charging_start_time": null, + "scheduled_charging_start_time_app": 600, + "scheduled_departure_time": 1704837600, + "scheduled_departure_time_minutes": 480, + "supercharger_session_trip_planner": false, + "time_to_full_charge": 0, + "timestamp": 1705707520649, + "trip_charging": false, + "usable_battery_level": 77, + "user_charge_enable_request": null + }, + "climate_state": { 
+ "allow_cabin_overheat_protection": true, + "auto_seat_climate_left": true, + "auto_seat_climate_right": true, + "auto_steering_wheel_heat": false, + "battery_heater": false, + "battery_heater_no_power": null, + "cabin_overheat_protection": "On", + "cabin_overheat_protection_actively_cooling": false, + "climate_keeper_mode": "keep", + "cop_activation_temperature": "High", + "defrost_mode": 0, + "driver_temp_setting": 22, + "fan_status": 0, + "hvac_auto_request": "On", + "inside_temp": 29.8, + "is_auto_conditioning_on": false, + "is_climate_on": true, + "is_front_defroster_on": false, + "is_preconditioning": false, + "is_rear_defroster_on": false, + "left_temp_direction": 251, + "max_avail_temp": 28, + "min_avail_temp": 15, + "outside_temp": 30, + "passenger_temp_setting": 22, + "remote_heater_control_enabled": false, + "right_temp_direction": 251, + "seat_heater_left": 0, + "seat_heater_rear_center": 0, + "seat_heater_rear_left": 0, + "seat_heater_rear_right": 0, + "seat_heater_right": 0, + "side_mirror_heaters": false, + "steering_wheel_heat_level": 0, + "steering_wheel_heater": false, + "supports_fan_only_cabin_overheat_protection": true, + "timestamp": 1705707520649, + "wiper_blade_heater": false + }, + "drive_state": { + "active_route_latitude": 30.2226265, + "active_route_longitude": -97.6236871, + "active_route_miles_to_arrival": 0.039491, + "active_route_minutes_to_arrival": 0.103577, + "active_route_traffic_minutes_delay": 0, + "gps_as_of": 1701129612, + "heading": 185, + "latitude": -30.222626, + "longitude": -97.6236871, + "native_latitude": -30.222626, + "native_location_supported": 1, + "native_longitude": -97.6236871, + "native_type": "wgs", + "power": -7, + "shift_state": null, + "speed": null, + "timestamp": 1705707520649 + }, + "gui_settings": { + "gui_24_hour_time": false, + "gui_charge_rate_units": "kW", + "gui_distance_units": "km/hr", + "gui_range_display": "Rated", + "gui_temperature_units": "C", + "gui_tirepressure_units": "Psi", + "show_range_units": false, + "timestamp": 1705707520649 + }, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + "charge_port_type": "CCS", + "cop_user_set_temp_supported": true, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": true, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705707520649, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "vehicle_state": { + "api_version": 71, + "autopark_state_v2": "unavailable", + "calendar_supported": true, + "car_version": "2023.44.30.8 06f534d46010", + 
"center_display_state": 0, + "dashcam_clip_save_available": true, + "dashcam_state": "Recording", + "df": 0, + "dr": 0, + "fd_window": 0, + "feature_bitmask": "fbdffbff,187f", + "fp_window": 0, + "ft": 0, + "is_user_present": false, + "locked": false, + "media_info": { + "a2dp_source_name": "Pixel 8 Pro", + "audio_volume": 1.6667, + "audio_volume_increment": 0.333333, + "audio_volume_max": 10.333333, + "media_playback_status": "Playing", + "now_playing_album": "Elon Musk", + "now_playing_artist": "Walter Isaacson", + "now_playing_duration": 651000, + "now_playing_elapsed": 1000, + "now_playing_source": "Audible", + "now_playing_station": "Elon Musk", + "now_playing_title": "Chapter 51: Cybertruck: Tesla, 2018–2019" + }, + "media_state": { + "remote_control_enabled": true + }, + "notifications_supported": true, + "odometer": 6481.019282, + "parsed_calendar_supported": true, + "pf": 0, + "pr": 0, + "rd_window": 0, + "remote_start": false, + "remote_start_enabled": true, + "remote_start_supported": true, + "rp_window": 0, + "rt": 0, + "santa_mode": 0, + "sentry_mode": false, + "sentry_mode_available": true, + "service_mode": false, + "service_mode_plus": false, + "software_update": { + "download_perc": 100, + "expected_duration_sec": 2700, + "install_perc": 1, + "status": "available", + "version": "2024.12.0.0" + }, + "speed_limit_mode": { + "active": false, + "current_limit_mph": 69, + "max_limit_mph": 120, + "min_limit_mph": 50, + "pin_code_set": true + }, + "sun_roof_state": "open", + "vehicle_state_sun_roof_percent_open": 20, + "timestamp": 1705707520649, + "tpms_hard_warning_fl": false, + "tpms_hard_warning_fr": false, + "tpms_hard_warning_rl": false, + "tpms_hard_warning_rr": false, + "tpms_last_seen_pressure_time_fl": 1705700812, + "tpms_last_seen_pressure_time_fr": 1705700793, + "tpms_last_seen_pressure_time_rl": 1705700794, + "tpms_last_seen_pressure_time_rr": 1705700823, + "tpms_pressure_fl": 2.775, + "tpms_pressure_fr": 2.8, + "tpms_pressure_rl": 2.775, + "tpms_pressure_rr": 2.775, + "tpms_rcp_front_value": 2.9, + "tpms_rcp_rear_value": 2.9, + "tpms_soft_warning_fl": false, + "tpms_soft_warning_fr": false, + "tpms_soft_warning_rl": false, + "tpms_soft_warning_rr": false, + "valet_mode": false, + "valet_pin_needed": false, + "vehicle_name": "Test", + "vehicle_self_test_progress": 0, + "vehicle_self_test_requested": false, + "webcam_available": true + } + } +} diff --git a/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json b/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json new file mode 100644 index 00000000000..76416982eba --- /dev/null +++ b/tests/components/tesla_fleet/fixtures/vehicle_data_alt.json @@ -0,0 +1,279 @@ +{ + "response": { + "id": 1234, + "user_id": 1234, + "vehicle_id": 1234, + "vin": "LRWXF7EK4KC700000", + "color": null, + "access_type": "OWNER", + "granular_access": { + "hide_private": false + }, + "tokens": ["abc", "def"], + "state": "online", + "in_service": false, + "id_s": "1234", + "calendar_enabled": true, + "api_version": 71, + "backseat_token": null, + "backseat_token_updated_at": null, + "ble_autopair_enrolled": false, + "charge_state": { + "battery_heater_on": true, + "battery_level": 77, + "battery_range": 266.87, + "charge_amps": 16, + "charge_current_request": 16, + "charge_current_request_max": 16, + "charge_enable_request": true, + "charge_energy_added": 0, + "charge_limit_soc": 80, + "charge_limit_soc_max": 100, + "charge_limit_soc_min": 50, + "charge_limit_soc_std": 80, + "charge_miles_added_ideal": 0, + 
"charge_miles_added_rated": 0, + "charge_port_cold_weather_mode": false, + "charge_port_color": "", + "charge_port_door_open": true, + "charge_port_latch": "Engaged", + "charge_rate": 0, + "charger_actual_current": 0, + "charger_phases": null, + "charger_pilot_current": 16, + "charger_power": 0, + "charger_voltage": 2, + "charging_state": "Stopped", + "conn_charge_cable": "IEC", + "est_battery_range": 275.04, + "fast_charger_brand": "", + "fast_charger_present": false, + "fast_charger_type": "ACSingleWireCAN", + "ideal_battery_range": 266.87, + "max_range_charge_counter": 0, + "minutes_to_full_charge": "bad value", + "not_enough_power_to_heat": null, + "off_peak_charging_enabled": false, + "off_peak_charging_times": "all_week", + "off_peak_hours_end_time": 900, + "preconditioning_enabled": false, + "preconditioning_times": "all_week", + "scheduled_charging_mode": "Off", + "scheduled_charging_pending": false, + "scheduled_charging_start_time": null, + "scheduled_charging_start_time_app": 600, + "scheduled_departure_time": 1704837600, + "scheduled_departure_time_minutes": 480, + "supercharger_session_trip_planner": false, + "time_to_full_charge": null, + "timestamp": null, + "trip_charging": false, + "usable_battery_level": 77, + "user_charge_enable_request": true + }, + "climate_state": { + "allow_cabin_overheat_protection": true, + "auto_seat_climate_left": false, + "auto_seat_climate_right": false, + "auto_steering_wheel_heat": false, + "battery_heater": true, + "battery_heater_no_power": null, + "cabin_overheat_protection": "Off", + "cabin_overheat_protection_actively_cooling": false, + "climate_keeper_mode": "off", + "cop_activation_temperature": "Low", + "defrost_mode": 0, + "driver_temp_setting": 22, + "fan_status": 0, + "hvac_auto_request": "On", + "inside_temp": 29.8, + "is_auto_conditioning_on": false, + "is_climate_on": false, + "is_front_defroster_on": false, + "is_preconditioning": false, + "is_rear_defroster_on": false, + "left_temp_direction": 251, + "max_avail_temp": 28, + "min_avail_temp": 15, + "outside_temp": 30, + "passenger_temp_setting": 22, + "remote_heater_control_enabled": false, + "right_temp_direction": 251, + "seat_heater_left": 0, + "seat_heater_rear_center": 0, + "seat_heater_rear_left": 0, + "seat_heater_rear_right": 0, + "seat_heater_right": 0, + "side_mirror_heaters": false, + "steering_wheel_heat_level": 0, + "steering_wheel_heater": false, + "supports_fan_only_cabin_overheat_protection": true, + "timestamp": 1705707520649, + "wiper_blade_heater": false + }, + "drive_state": { + "active_route_latitude": 30.2226265, + "active_route_longitude": -97.6236871, + "active_route_miles_to_arrival": 0, + "active_route_minutes_to_arrival": 0, + "active_route_traffic_minutes_delay": 0, + "gps_as_of": 1701129612, + "heading": 185, + "latitude": -30.222626, + "longitude": -97.6236871, + "native_latitude": -30.222626, + "native_location_supported": 1, + "native_longitude": -97.6236871, + "native_type": "wgs", + "power": -7, + "shift_state": null, + "speed": null, + "timestamp": 1705707520649 + }, + "gui_settings": { + "gui_24_hour_time": false, + "gui_charge_rate_units": "kW", + "gui_distance_units": "km/hr", + "gui_range_display": "Rated", + "gui_temperature_units": "C", + "gui_tirepressure_units": "Psi", + "show_range_units": false, + "timestamp": 1705707520649 + }, + "vehicle_config": { + "aux_park_lamps": "Eu", + "badge_version": 1, + "can_accept_navigation_requests": true, + "can_actuate_trunks": true, + "car_special_type": "base", + "car_type": "model3", + 
"charge_port_type": "CCS", + "cop_user_set_temp_supported": false, + "dashcam_clip_save_supported": true, + "default_charge_to_max": false, + "driver_assist": "TeslaAP3", + "ece_restrictions": false, + "efficiency_package": "M32021", + "eu_vehicle": true, + "exterior_color": "DeepBlue", + "exterior_trim": "Black", + "exterior_trim_override": "", + "has_air_suspension": false, + "has_ludicrous_mode": false, + "has_seat_cooling": false, + "headlamp_type": "Global", + "interior_trim_type": "White2", + "key_version": 2, + "motorized_charge_port": true, + "paint_color_override": "0,9,25,0.7,0.04", + "performance_package": "Base", + "plg": true, + "pws": true, + "rear_drive_unit": "PM216MOSFET", + "rear_seat_heaters": 1, + "rear_seat_type": 0, + "rhd": true, + "roof_color": "RoofColorGlass", + "seat_type": null, + "spoiler_type": "None", + "sun_roof_installed": null, + "supports_qr_pairing": false, + "third_row_seats": "None", + "timestamp": 1705707520649, + "trim_badging": "74d", + "use_range_badging": true, + "utc_offset": 36000, + "webcam_selfie_supported": true, + "webcam_supported": true, + "wheel_type": "Pinwheel18CapKit" + }, + "vehicle_state": { + "api_version": 71, + "autopark_state_v2": "unavailable", + "calendar_supported": true, + "car_version": "2023.44.30.8 06f534d46010", + "center_display_state": 0, + "dashcam_clip_save_available": true, + "dashcam_state": "Recording", + "df": 0, + "dr": 0, + "fd_window": 1, + "feature_bitmask": "fbdffbff,187f", + "fp_window": 1, + "ft": 1, + "is_user_present": true, + "locked": false, + "media_info": { + "audio_volume": 2.6667, + "audio_volume_increment": 0.333333, + "audio_volume_max": 10.333333, + "media_playback_status": "Stopped", + "now_playing_album": "", + "now_playing_artist": "", + "now_playing_duration": 0, + "now_playing_elapsed": 0, + "now_playing_source": "Spotify", + "now_playing_station": "", + "now_playing_title": "" + }, + "media_state": { + "remote_control_enabled": true + }, + "notifications_supported": true, + "odometer": 6481.019282, + "parsed_calendar_supported": true, + "pf": 0, + "pr": 0, + "rd_window": 1, + "remote_start": false, + "remote_start_enabled": true, + "remote_start_supported": true, + "rp_window": 1, + "rt": 1, + "santa_mode": 0, + "sentry_mode": false, + "sentry_mode_available": true, + "service_mode": false, + "service_mode_plus": false, + "software_update": { + "download_perc": 0, + "expected_duration_sec": 2700, + "install_perc": 1, + "status": "", + "version": " " + }, + "speed_limit_mode": { + "active": false, + "current_limit_mph": 69, + "max_limit_mph": 120, + "min_limit_mph": 50, + "pin_code_set": true + }, + "timestamp": 1705707520649, + "tpms_hard_warning_fl": false, + "tpms_hard_warning_fr": false, + "tpms_hard_warning_rl": false, + "tpms_hard_warning_rr": false, + "tpms_last_seen_pressure_time_fl": 1705700812, + "tpms_last_seen_pressure_time_fr": 1705700793, + "tpms_last_seen_pressure_time_rl": 1705700794, + "tpms_last_seen_pressure_time_rr": 1705700823, + "tpms_pressure_fl": 2.775, + "tpms_pressure_fr": 2.8, + "tpms_pressure_rl": 2.775, + "tpms_pressure_rr": 2.775, + "tpms_rcp_front_value": 2.9, + "tpms_rcp_rear_value": 2.9, + "tpms_soft_warning_fl": false, + "tpms_soft_warning_fr": false, + "tpms_soft_warning_rl": false, + "tpms_soft_warning_rr": false, + "valet_mode": false, + "valet_pin_needed": false, + "vehicle_name": "Test", + "vehicle_self_test_progress": 0, + "vehicle_self_test_requested": false, + "webcam_available": true + } + } +} diff --git 
a/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr new file mode 100644 index 00000000000..05ef4879de6 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_binary_sensors.ambr @@ -0,0 +1,1571 @@ +# serializer version: 1 +# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Backup capable', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_capable', + 'unique_id': '123456-backup_capable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_backup_capable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services active', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_active', + 'unique_id': '123456-grid_services_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services enabled', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_grid_services_enabled', + 'unique_id': '123456-components_grid_services_enabled', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_binary_sensor[binary_sensor.energy_site_grid_services_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_battery_heater-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_battery_heater', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery heater', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_battery_heater_on', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_heater_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_battery_heater-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Battery heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_battery_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cabin overheat protection actively cooling', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_cabin_overheat_protection_actively_cooling', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection_actively_cooling', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_cabin_overheat_protection_actively_cooling-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Cabin overheat protection actively cooling', + }), + 'context': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charge_cable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Charge cable', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_conn_charge_cable', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charge_cable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charger has multiple phases', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_phases', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_phases', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_charger_has_multiple_phases-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charger has multiple phases', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_dashcam-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_dashcam', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dashcam', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_dashcam_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dashcam_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_dashcam-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Dashcam', + }), + 'context': , + 'entity_id': 'binary_sensor.test_dashcam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Front driver door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_df', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_df', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front driver window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_fd_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fd_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_driver_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_passenger_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front passenger door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_pf', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pf', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Front passenger window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_fp_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_fp_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_front_passenger_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Preconditioning', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_is_preconditioning', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_is_preconditioning', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Preconditioning enabled', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_preconditioning_enabled', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_preconditioning_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_preconditioning_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear driver door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_dr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_dr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear driver window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_rd_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rd_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_driver_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear passenger door', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_pr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_pr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_door-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.test_rear_passenger_window', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Rear passenger window', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_rp_window', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_rp_window', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_rear_passenger_window-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_scheduled_charging_pending-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Scheduled charging pending', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_scheduled_charging_pending', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_scheduled_charging_pending', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_scheduled_charging_pending-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Scheduled charging pending', + }), + 'context': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Status', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': 'LRWXF7EK4KC700000-state', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Status', + }), + 'context': , + 'entity_id': 'binary_sensor.test_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning front left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_fl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fl', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning front right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_fr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_fr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_front_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning rear left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_rl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rl', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear left', + }), + 'context': , + 'entity_id': 
'binary_sensor.test_tire_pressure_warning_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure warning rear right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_soft_warning_rr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_soft_warning_rr', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_tire_pressure_warning_rear_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_trip_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.test_trip_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Trip charging', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_trip_charging', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_trip_charging', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_trip_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Trip charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_trip_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor[binary_sensor.test_user_present-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_user_present', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'User present', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_is_user_present', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_is_user_present', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensor[binary_sensor.test_user_present-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test User present', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_present', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_backup_capable-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_active-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.energy_site_grid_services_enabled-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_battery_heater-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Battery heater', + }), + 'context': , + 'entity_id': 'binary_sensor.test_battery_heater', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_cabin_overheat_protection_actively_cooling-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'heat', + 'friendly_name': 'Test Cabin overheat protection actively cooling', + }), + 'context': , + 'entity_id': 'binary_sensor.test_cabin_overheat_protection_actively_cooling', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_charge_cable-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_charger_has_multiple_phases-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charger has multiple phases', + }), + 'context': , + 'entity_id': 'binary_sensor.test_charger_has_multiple_phases', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_dashcam-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'Test Dashcam', + }), + 'context': , + 'entity_id': 'binary_sensor.test_dashcam', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_door', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_driver_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Front passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_front_passenger_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Front passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_front_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_preconditioning_enabled-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Preconditioning enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.test_preconditioning_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear driver door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_driver_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear driver window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_driver_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_passenger_door-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'door', + 'friendly_name': 'Test Rear passenger door', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_door', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_rear_passenger_window-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Rear passenger window', + }), + 'context': , + 'entity_id': 'binary_sensor.test_rear_passenger_window', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_scheduled_charging_pending-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'Test Scheduled charging pending', + }), + 'context': , + 'entity_id': 'binary_sensor.test_scheduled_charging_pending', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_status-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Test Status', + }), + 'context': , + 'entity_id': 'binary_sensor.test_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_front_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning front right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear left', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_tire_pressure_warning_rear_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Test Tire pressure warning rear right', + }), + 'context': , + 'entity_id': 'binary_sensor.test_tire_pressure_warning_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_trip_charging-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Trip charging', + }), + 'context': , + 'entity_id': 'binary_sensor.test_trip_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensor_refresh[binary_sensor.test_user_present-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'presence', + 'friendly_name': 'Test User present', + }), + 'context': , + 'entity_id': 'binary_sensor.test_user_present', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr new file mode 100644 index 00000000000..194eda6fcff --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_device_tracker.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_device_tracker[device_tracker.test_location-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 
'entity_category': , + 'entity_id': 'device_tracker.test_location', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Location', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'location', + 'unique_id': 'LRWXF7EK4KC700000-location', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.test_location-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Location', + 'gps_accuracy': 0, + 'latitude': -30.222626, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_location', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_device_tracker[device_tracker.test_route-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.test_route', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Route', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'route', + 'unique_id': 'LRWXF7EK4KC700000-route', + 'unit_of_measurement': None, + }) +# --- +# name: test_device_tracker[device_tracker.test_route-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Route', + 'gps_accuracy': 0, + 'latitude': 30.2226265, + 'longitude': -97.6236871, + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.test_route', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..902c7af131e --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_diagnostics.ambr @@ -0,0 +1,436 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'energysites': list([ + dict({ + 'info': dict({ + 'backup_reserve_percent': 0, + 'battery_count': 2, + 'components_backup': True, + 'components_backup_time_remaining_enabled': True, + 'components_batteries': list([ + dict({ + 'device_id': 'battery-1-id', + 'din': 'battery-1-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-10-B', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + dict({ + 'device_id': 'battery-2-id', + 'din': 'battery-2-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-05-C', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + ]), + 'components_battery': True, + 'components_battery_solar_offset_view_enabled': True, + 'components_battery_type': 'ac_powerwall', + 'components_car_charging_data_supported': False, + 'components_configurable': True, + 'components_customer_preferred_export_rule': 'pv_only', + 
'components_disallow_charge_from_grid_with_solar_installed': True, + 'components_energy_service_self_scheduling_enabled': True, + 'components_energy_value_header': 'Energy Value', + 'components_energy_value_subheader': 'Estimated Value', + 'components_flex_energy_request_capable': False, + 'components_gateway': 'teg', + 'components_gateways': list([ + dict({ + 'device_id': 'gateway-id', + 'din': 'gateway-din', + 'firmware_version': '24.4.0 0fe780c9', + 'is_active': True, + 'part_name': 'Tesla Backup Gateway 2', + 'part_number': '1152100-14-J', + 'part_type': 10, + 'serial_number': '**REDACTED**', + 'site_id': '1234-abcd', + 'updated_datetime': '2024-05-14T00:00:00.000Z', + }), + ]), + 'components_grid': True, + 'components_grid_services_enabled': False, + 'components_load_meter': True, + 'components_net_meter_mode': 'battery_ok', + 'components_off_grid_vehicle_charging_reserve_supported': True, + 'components_set_islanding_mode_enabled': True, + 'components_show_grid_import_battery_source_cards': True, + 'components_solar': True, + 'components_solar_type': 'pv_panel', + 'components_solar_value_enabled': True, + 'components_storm_mode_capable': True, + 'components_system_alerts_enabled': True, + 'components_tou_capable': True, + 'components_vehicle_charging_performance_view_enabled': False, + 'components_vehicle_charging_solar_offset_view_enabled': False, + 'components_wall_connectors': list([ + dict({ + 'device_id': '123abc', + 'din': 'abd-123', + 'is_active': True, + 'part_name': 'Gen 3 Wall Connector', + }), + dict({ + 'device_id': '234bcd', + 'din': 'bcd-234', + 'is_active': True, + 'part_name': 'Gen 3 Wall Connector', + }), + ]), + 'components_wifi_commissioning_enabled': True, + 'default_real_mode': 'self_consumption', + 'id': '1233-abcd', + 'installation_date': '**REDACTED**', + 'installation_time_zone': '', + 'max_site_meter_power_ac': 1000000000, + 'min_site_meter_power_ac': -1000000000, + 'nameplate_energy': 40500, + 'nameplate_power': 15000, + 'site_name': 'Site', + 'tou_settings_optimization_strategy': 'economics', + 'tou_settings_schedule': list([ + dict({ + 'end_seconds': 3600, + 'start_seconds': 0, + 'target': 'off_peak', + 'week_days': list([ + 1, + 0, + ]), + }), + dict({ + 'end_seconds': 0, + 'start_seconds': 3600, + 'target': 'peak', + 'week_days': list([ + 1, + 0, + ]), + }), + ]), + 'user_settings_breaker_alert_enabled': False, + 'user_settings_go_off_grid_test_banner_enabled': False, + 'user_settings_powerwall_onboarding_settings_set': True, + 'user_settings_powerwall_tesla_electric_interested_in': False, + 'user_settings_storm_mode_enabled': True, + 'user_settings_sync_grid_alert_enabled': True, + 'user_settings_vpp_tour_enabled': True, + 'version': '23.44.0 eb113390', + 'vpp_backup_reserve_percent': 0, + }), + 'live': dict({ + 'backup_capable': True, + 'battery_power': 5060, + 'energy_left': 38896.47368421053, + 'generator_power': 0, + 'grid_power': 0, + 'grid_services_active': False, + 'grid_services_power': 0, + 'grid_status': 'Active', + 'island_status': 'on_grid', + 'load_power': 6245, + 'percentage_charged': 95.50537403739663, + 'solar_power': 1185, + 'storm_mode_active': False, + 'timestamp': '2024-01-01T00:00:00+00:00', + 'total_pack_energy': 40727, + 'wall_connectors': dict({ + 'abd-123': dict({ + 'din': 'abd-123', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + 'bcd-234': dict({ + 'din': 'bcd-234', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + }), + }), 
+ }), + ]), + 'scopes': list([ + 'openid', + 'offline_access', + 'vehicle_device_data', + 'vehicle_cmds', + 'vehicle_charging_cmds', + 'energy_device_data', + 'energy_cmds', + ]), + 'vehicles': list([ + dict({ + 'data': dict({ + 'access_type': 'OWNER', + 'api_version': 71, + 'backseat_token': None, + 'backseat_token_updated_at': None, + 'ble_autopair_enrolled': False, + 'calendar_enabled': True, + 'charge_state_battery_heater_on': False, + 'charge_state_battery_level': 77, + 'charge_state_battery_range': 266.87, + 'charge_state_charge_amps': 16, + 'charge_state_charge_current_request': 16, + 'charge_state_charge_current_request_max': 16, + 'charge_state_charge_enable_request': True, + 'charge_state_charge_energy_added': 0, + 'charge_state_charge_limit_soc': 80, + 'charge_state_charge_limit_soc_max': 100, + 'charge_state_charge_limit_soc_min': 50, + 'charge_state_charge_limit_soc_std': 80, + 'charge_state_charge_miles_added_ideal': 0, + 'charge_state_charge_miles_added_rated': 0, + 'charge_state_charge_port_cold_weather_mode': False, + 'charge_state_charge_port_color': '', + 'charge_state_charge_port_door_open': True, + 'charge_state_charge_port_latch': 'Engaged', + 'charge_state_charge_rate': 0, + 'charge_state_charger_actual_current': 0, + 'charge_state_charger_phases': None, + 'charge_state_charger_pilot_current': 16, + 'charge_state_charger_power': 0, + 'charge_state_charger_voltage': 2, + 'charge_state_charging_state': 'Stopped', + 'charge_state_conn_charge_cable': 'IEC', + 'charge_state_est_battery_range': 275.04, + 'charge_state_fast_charger_brand': '', + 'charge_state_fast_charger_present': False, + 'charge_state_fast_charger_type': 'ACSingleWireCAN', + 'charge_state_ideal_battery_range': 266.87, + 'charge_state_max_range_charge_counter': 0, + 'charge_state_minutes_to_full_charge': 0, + 'charge_state_not_enough_power_to_heat': None, + 'charge_state_off_peak_charging_enabled': False, + 'charge_state_off_peak_charging_times': 'all_week', + 'charge_state_off_peak_hours_end_time': 900, + 'charge_state_preconditioning_enabled': False, + 'charge_state_preconditioning_times': 'all_week', + 'charge_state_scheduled_charging_mode': 'Off', + 'charge_state_scheduled_charging_pending': False, + 'charge_state_scheduled_charging_start_time': None, + 'charge_state_scheduled_charging_start_time_app': 600, + 'charge_state_scheduled_departure_time': 1704837600, + 'charge_state_scheduled_departure_time_minutes': 480, + 'charge_state_supercharger_session_trip_planner': False, + 'charge_state_time_to_full_charge': 0, + 'charge_state_timestamp': 1705707520649, + 'charge_state_trip_charging': False, + 'charge_state_usable_battery_level': 77, + 'charge_state_user_charge_enable_request': None, + 'climate_state_allow_cabin_overheat_protection': True, + 'climate_state_auto_seat_climate_left': True, + 'climate_state_auto_seat_climate_right': True, + 'climate_state_auto_steering_wheel_heat': False, + 'climate_state_battery_heater': False, + 'climate_state_battery_heater_no_power': None, + 'climate_state_cabin_overheat_protection': 'On', + 'climate_state_cabin_overheat_protection_actively_cooling': False, + 'climate_state_climate_keeper_mode': 'keep', + 'climate_state_cop_activation_temperature': 'High', + 'climate_state_defrost_mode': 0, + 'climate_state_driver_temp_setting': 22, + 'climate_state_fan_status': 0, + 'climate_state_hvac_auto_request': 'On', + 'climate_state_inside_temp': 29.8, + 'climate_state_is_auto_conditioning_on': False, + 'climate_state_is_climate_on': True, + 
'climate_state_is_front_defroster_on': False, + 'climate_state_is_preconditioning': False, + 'climate_state_is_rear_defroster_on': False, + 'climate_state_left_temp_direction': 251, + 'climate_state_max_avail_temp': 28, + 'climate_state_min_avail_temp': 15, + 'climate_state_outside_temp': 30, + 'climate_state_passenger_temp_setting': 22, + 'climate_state_remote_heater_control_enabled': False, + 'climate_state_right_temp_direction': 251, + 'climate_state_seat_heater_left': 0, + 'climate_state_seat_heater_rear_center': 0, + 'climate_state_seat_heater_rear_left': 0, + 'climate_state_seat_heater_rear_right': 0, + 'climate_state_seat_heater_right': 0, + 'climate_state_side_mirror_heaters': False, + 'climate_state_steering_wheel_heat_level': 0, + 'climate_state_steering_wheel_heater': False, + 'climate_state_supports_fan_only_cabin_overheat_protection': True, + 'climate_state_timestamp': 1705707520649, + 'climate_state_wiper_blade_heater': False, + 'color': None, + 'drive_state_active_route_latitude': '**REDACTED**', + 'drive_state_active_route_longitude': '**REDACTED**', + 'drive_state_active_route_miles_to_arrival': 0.039491, + 'drive_state_active_route_minutes_to_arrival': 0.103577, + 'drive_state_active_route_traffic_minutes_delay': 0, + 'drive_state_gps_as_of': 1701129612, + 'drive_state_heading': 185, + 'drive_state_latitude': '**REDACTED**', + 'drive_state_longitude': '**REDACTED**', + 'drive_state_native_latitude': '**REDACTED**', + 'drive_state_native_location_supported': 1, + 'drive_state_native_longitude': '**REDACTED**', + 'drive_state_native_type': 'wgs', + 'drive_state_power': -7, + 'drive_state_shift_state': None, + 'drive_state_speed': None, + 'drive_state_timestamp': 1705707520649, + 'granular_access_hide_private': False, + 'gui_settings_gui_24_hour_time': False, + 'gui_settings_gui_charge_rate_units': 'kW', + 'gui_settings_gui_distance_units': 'km/hr', + 'gui_settings_gui_range_display': 'Rated', + 'gui_settings_gui_temperature_units': 'C', + 'gui_settings_gui_tirepressure_units': 'Psi', + 'gui_settings_show_range_units': False, + 'gui_settings_timestamp': 1705707520649, + 'id': '**REDACTED**', + 'id_s': '**REDACTED**', + 'in_service': False, + 'state': 'online', + 'tokens': '**REDACTED**', + 'user_id': '**REDACTED**', + 'vehicle_config_aux_park_lamps': 'Eu', + 'vehicle_config_badge_version': 1, + 'vehicle_config_can_accept_navigation_requests': True, + 'vehicle_config_can_actuate_trunks': True, + 'vehicle_config_car_special_type': 'base', + 'vehicle_config_car_type': 'model3', + 'vehicle_config_charge_port_type': 'CCS', + 'vehicle_config_cop_user_set_temp_supported': True, + 'vehicle_config_dashcam_clip_save_supported': True, + 'vehicle_config_default_charge_to_max': False, + 'vehicle_config_driver_assist': 'TeslaAP3', + 'vehicle_config_ece_restrictions': False, + 'vehicle_config_efficiency_package': 'M32021', + 'vehicle_config_eu_vehicle': True, + 'vehicle_config_exterior_color': 'DeepBlue', + 'vehicle_config_exterior_trim': 'Black', + 'vehicle_config_exterior_trim_override': '', + 'vehicle_config_has_air_suspension': False, + 'vehicle_config_has_ludicrous_mode': False, + 'vehicle_config_has_seat_cooling': False, + 'vehicle_config_headlamp_type': 'Global', + 'vehicle_config_interior_trim_type': 'White2', + 'vehicle_config_key_version': 2, + 'vehicle_config_motorized_charge_port': True, + 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', + 'vehicle_config_performance_package': 'Base', + 'vehicle_config_plg': True, + 'vehicle_config_pws': True, + 
'vehicle_config_rear_drive_unit': 'PM216MOSFET', + 'vehicle_config_rear_seat_heaters': 1, + 'vehicle_config_rear_seat_type': 0, + 'vehicle_config_rhd': True, + 'vehicle_config_roof_color': 'RoofColorGlass', + 'vehicle_config_seat_type': None, + 'vehicle_config_spoiler_type': 'None', + 'vehicle_config_sun_roof_installed': True, + 'vehicle_config_supports_qr_pairing': False, + 'vehicle_config_third_row_seats': 'None', + 'vehicle_config_timestamp': 1705707520649, + 'vehicle_config_trim_badging': '74d', + 'vehicle_config_use_range_badging': True, + 'vehicle_config_utc_offset': 36000, + 'vehicle_config_webcam_selfie_supported': True, + 'vehicle_config_webcam_supported': True, + 'vehicle_config_wheel_type': 'Pinwheel18CapKit', + 'vehicle_id': '**REDACTED**', + 'vehicle_state_api_version': 71, + 'vehicle_state_autopark_state_v2': 'unavailable', + 'vehicle_state_calendar_supported': True, + 'vehicle_state_car_version': '2023.44.30.8 06f534d46010', + 'vehicle_state_center_display_state': 0, + 'vehicle_state_dashcam_clip_save_available': True, + 'vehicle_state_dashcam_state': 'Recording', + 'vehicle_state_df': 0, + 'vehicle_state_dr': 0, + 'vehicle_state_fd_window': 0, + 'vehicle_state_feature_bitmask': 'fbdffbff,187f', + 'vehicle_state_fp_window': 0, + 'vehicle_state_ft': 0, + 'vehicle_state_is_user_present': False, + 'vehicle_state_locked': False, + 'vehicle_state_media_info_a2dp_source_name': 'Pixel 8 Pro', + 'vehicle_state_media_info_audio_volume': 1.6667, + 'vehicle_state_media_info_audio_volume_increment': 0.333333, + 'vehicle_state_media_info_audio_volume_max': 10.333333, + 'vehicle_state_media_info_media_playback_status': 'Playing', + 'vehicle_state_media_info_now_playing_album': 'Elon Musk', + 'vehicle_state_media_info_now_playing_artist': 'Walter Isaacson', + 'vehicle_state_media_info_now_playing_duration': 651000, + 'vehicle_state_media_info_now_playing_elapsed': 1000, + 'vehicle_state_media_info_now_playing_source': 'Audible', + 'vehicle_state_media_info_now_playing_station': 'Elon Musk', + 'vehicle_state_media_info_now_playing_title': 'Chapter 51: Cybertruck: Tesla, 2018–2019', + 'vehicle_state_media_state_remote_control_enabled': True, + 'vehicle_state_notifications_supported': True, + 'vehicle_state_odometer': 6481.019282, + 'vehicle_state_parsed_calendar_supported': True, + 'vehicle_state_pf': 0, + 'vehicle_state_pr': 0, + 'vehicle_state_rd_window': 0, + 'vehicle_state_remote_start': False, + 'vehicle_state_remote_start_enabled': True, + 'vehicle_state_remote_start_supported': True, + 'vehicle_state_rp_window': 0, + 'vehicle_state_rt': 0, + 'vehicle_state_santa_mode': 0, + 'vehicle_state_sentry_mode': False, + 'vehicle_state_sentry_mode_available': True, + 'vehicle_state_service_mode': False, + 'vehicle_state_service_mode_plus': False, + 'vehicle_state_software_update_download_perc': 100, + 'vehicle_state_software_update_expected_duration_sec': 2700, + 'vehicle_state_software_update_install_perc': 1, + 'vehicle_state_software_update_status': 'available', + 'vehicle_state_software_update_version': '2024.12.0.0', + 'vehicle_state_speed_limit_mode_active': False, + 'vehicle_state_speed_limit_mode_current_limit_mph': 69, + 'vehicle_state_speed_limit_mode_max_limit_mph': 120, + 'vehicle_state_speed_limit_mode_min_limit_mph': 50, + 'vehicle_state_speed_limit_mode_pin_code_set': True, + 'vehicle_state_sun_roof_state': 'open', + 'vehicle_state_timestamp': 1705707520649, + 'vehicle_state_tpms_hard_warning_fl': False, + 'vehicle_state_tpms_hard_warning_fr': False, + 
'vehicle_state_tpms_hard_warning_rl': False, + 'vehicle_state_tpms_hard_warning_rr': False, + 'vehicle_state_tpms_last_seen_pressure_time_fl': 1705700812, + 'vehicle_state_tpms_last_seen_pressure_time_fr': 1705700793, + 'vehicle_state_tpms_last_seen_pressure_time_rl': 1705700794, + 'vehicle_state_tpms_last_seen_pressure_time_rr': 1705700823, + 'vehicle_state_tpms_pressure_fl': 2.775, + 'vehicle_state_tpms_pressure_fr': 2.8, + 'vehicle_state_tpms_pressure_rl': 2.775, + 'vehicle_state_tpms_pressure_rr': 2.775, + 'vehicle_state_tpms_rcp_front_value': 2.9, + 'vehicle_state_tpms_rcp_rear_value': 2.9, + 'vehicle_state_tpms_soft_warning_fl': False, + 'vehicle_state_tpms_soft_warning_fr': False, + 'vehicle_state_tpms_soft_warning_rl': False, + 'vehicle_state_tpms_soft_warning_rr': False, + 'vehicle_state_valet_mode': False, + 'vehicle_state_valet_pin_needed': False, + 'vehicle_state_vehicle_name': 'Test', + 'vehicle_state_vehicle_self_test_progress': 0, + 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, + 'vehicle_state_webcam_available': True, + 'vin': '**REDACTED**', + }), + }), + ]), + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_init.ambr b/tests/components/tesla_fleet/snapshots/test_init.ambr new file mode 100644 index 00000000000..e9828db9f1b --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_init.ambr @@ -0,0 +1,129 @@ +# serializer version: 1 +# name: test_devices[{('tesla_fleet', '123456')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + '123456', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Powerwall 2, Tesla Backup Gateway 2', + 'model_id': None, + 'name': 'Energy Site', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '123456', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[{('tesla_fleet', 'LRWXF7EK4KC700000')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'LRWXF7EK4KC700000', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Model X', + 'model_id': None, + 'name': 'Test', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': 'LRWXF7EK4KC700000', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': None, + }) +# --- +# name: test_devices[{('tesla_fleet', 'abd-123')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'abd-123', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Gen 3 Wall Connector', + 'model_id': None, + 'name': 'Wall Connector', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '123', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- +# name: test_devices[{('tesla_fleet', 'bcd-234')}] + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , 
+ 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'tesla_fleet', + 'bcd-234', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Tesla', + 'model': 'Gen 3 Wall Connector', + 'model_id': None, + 'name': 'Wall Connector', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '234', + 'suggested_area': None, + 'sw_version': None, + 'via_device_id': , + }) +# --- diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..e4c4c3d96c2 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -0,0 +1,3424 @@ +# serializer version: 1 +# name: test_sensors[sensor.energy_site_battery_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_battery_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'battery_power', + 'unique_id': '123456-battery_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_battery_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Battery power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.06', + }) +# --- +# name: test_sensors[sensor.energy_site_battery_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Battery power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_battery_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.06', + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_energy_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'energy_left', + 'unique_id': '123456-energy_left', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Energy left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_energy_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8964736842105', + }) +# --- +# name: test_sensors[sensor.energy_site_energy_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Energy left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_energy_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '38.8964736842105', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_generator_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Generator power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'generator_power', + 'unique_id': '123456-generator_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Generator power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_generator_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Generator power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_generator_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_power', + 'unique_id': '123456-grid_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'power', + 'friendly_name': 'Energy Site Grid power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_grid_services_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Grid services power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_power', + 'unique_id': '123456-grid_services_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid services power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_grid_services_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Grid services power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_grid_services_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_load_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Load power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'load_power', + 'unique_id': '123456-load_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Load power', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_load_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.245', + }) +# --- +# name: test_sensors[sensor.energy_site_load_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Load power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_load_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.245', + }) +# --- +# name: test_sensors[sensor.energy_site_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'island_status', + 'unique_id': '123456-island_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site None', + }), + 'context': , + 'entity_id': 'sensor.energy_site_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_none-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Energy Site None', + }), + 'context': , + 'entity_id': 'sensor.energy_site_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on_grid', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_percentage_charged', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Percentage charged', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'percentage_charged', + 'unique_id': '123456-percentage_charged', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Percentage charged', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_percentage_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.5053740373966', + }) +# --- +# name: test_sensors[sensor.energy_site_percentage_charged-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Percentage charged', + 'state_class': , + 
'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_percentage_charged', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '95.5053740373966', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_solar_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Solar power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'solar_power', + 'unique_id': '123456-solar_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Solar power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.185', + }) +# --- +# name: test_sensors[sensor.energy_site_solar_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Energy Site Solar power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_solar_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.185', + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total pack energy', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'total_pack_energy', + 'unique_id': '123456-total_pack_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Total pack energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.727', + }) +# --- +# name: test_sensors[sensor.energy_site_total_pack_energy-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy_storage', + 'friendly_name': 'Energy Site Total pack energy', + 'state_class': , + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.energy_site_total_pack_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.727', + }) +# --- +# name: test_sensors[sensor.energy_site_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.energy_site_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'version', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'version', + 'unique_id': '123456-version', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.energy_site_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site version', + }), + 'context': , + 'entity_id': 'sensor.energy_site_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.44.0 eb113390', + }) +# --- +# name: test_sensors[sensor.energy_site_version-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site version', + }), + 'context': , + 'entity_id': 'sensor.energy_site_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '23.44.0 eb113390', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VPP backup reserve', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vpp_backup_reserve_percent', + 'unique_id': '123456-vpp_backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site VPP backup reserve', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.energy_site_vpp_backup_reserve-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site VPP backup reserve', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.energy_site_vpp_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_battery_level', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery level', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_battery_level', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_battery_level-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_charge_cable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charge_cable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charge cable', + 'platform': 'tesla_fleet', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_conn_charge_cable', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_conn_charge_cable', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_charge_cable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'IEC', + }) +# --- +# name: test_sensors[sensor.test_charge_cable-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Charge cable', + }), + 'context': , + 'entity_id': 'sensor.test_charge_cable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'IEC', + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charge_energy_added', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge energy added', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_energy_added', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_energy_added', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_energy_added', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_energy_added-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Test Charge energy added', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_energy_added', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charge_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charge rate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charge_rate', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charge_rate', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charge_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 
'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charge_rate-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Charge rate', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charge_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charger_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger current', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_actual_current', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_actual_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_current-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Test Charger current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charger_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_power', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 
'friendly_name': 'Test Charger power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_charger_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charger voltage', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charger_voltage', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charger_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.test_charger_voltage-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Test Charger voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_charger_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.test_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_charging_state', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_charging_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[sensor.test_charging-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Charging', + 'options': list([ + 'starting', + 'charging', + 'stopped', + 'complete', + 'disconnected', + 'no_power', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_charging', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'stopped', + }) +# --- +# name: test_sensors[sensor.test_distance_to_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_distance_to_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Distance to arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_miles_to_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_miles_to_arrival', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_distance_to_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_distance_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.063555', + }) +# --- +# name: test_sensors[sensor.test_distance_to_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Distance to arrival', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_distance_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Driver temperature setting', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_driver_temp_setting', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_driver_temp_setting', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_driver_temperature_setting-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Driver temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_driver_temperature_setting', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_estimate_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Estimate battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_est_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_est_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_estimate_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '442.63397376', + }) +# --- +# name: test_sensors[sensor.test_estimate_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Estimate battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_estimate_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '442.63397376', + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_fast_charger_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fast charger type', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_fast_charger_type', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_fast_charger_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Fast charger type', + }), + 'context': , + 'entity_id': 'sensor.test_fast_charger_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ACSingleWireCAN', + }) +# --- +# name: test_sensors[sensor.test_fast_charger_type-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Fast charger type', + }), + 'context': , + 'entity_id': 'sensor.test_fast_charger_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'ACSingleWireCAN', + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_ideal_battery_range', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ideal battery range', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_ideal_battery_range', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_ideal_battery_range', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_ideal_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_ideal_battery_range-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Ideal battery range', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_ideal_battery_range', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '429.48563328', + }) +# --- +# name: test_sensors[sensor.test_inside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_inside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inside temperature', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_inside_temp', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_inside_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_inside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_inside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29.8', + }) +# --- +# name: test_sensors[sensor.test_inside_temperature-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Inside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_inside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '29.8', + }) +# --- +# name: test_sensors[sensor.test_odometer-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_odometer', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Odometer', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_odometer', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_odometer', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_odometer-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_odometer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10430.189495371', + }) +# --- +# name: test_sensors[sensor.test_odometer-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'distance', + 'friendly_name': 'Test Odometer', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_odometer', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10430.189495371', + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_outside_temp', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_outside_temp', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensors[sensor.test_outside_temperature-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Passenger temperature setting', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_passenger_temp_setting', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_passenger_temp_setting', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_passenger_temperature_setting-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Test Passenger temperature setting', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_passenger_temperature_setting', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_sensors[sensor.test_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_power', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-7', + }) +# --- +# name: test_sensors[sensor.test_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Test Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-7', + }) +# --- +# name: test_sensors[sensor.test_shift_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_shift_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Shift state', + 'platform': 'tesla_fleet', 
+ 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_shift_state', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_shift_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_shift_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_shift_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'p', + }) +# --- +# name: test_sensors[sensor.test_shift_state-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Test Shift state', + 'options': list([ + 'p', + 'd', + 'r', + 'n', + ]), + }), + 'context': , + 'entity_id': 'sensor.test_shift_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'p', + }) +# --- +# name: test_sensors[sensor.test_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Speed', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_speed', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_speed', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_speed-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speed', + 'friendly_name': 'Test Speed', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State of charge at arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_energy_at_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_energy_at_arrival', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'battery', + 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.test_state_of_charge_at_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test State of charge at arrival', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_state_of_charge_at_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_time_to_arrival', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Time to arrival', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_minutes_to_arrival', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_minutes_to_arrival', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to arrival', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-01T00:00:06+00:00', + }) +# --- +# name: test_sensors[sensor.test_time_to_arrival-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to arrival', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_arrival', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Time to full charge', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_minutes_to_full_charge', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_minutes_to_full_charge', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to full charge', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_time_to_full_charge-statealt] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Test Time to full charge', + }), + 'context': , + 'entity_id': 'sensor.test_time_to_full_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure front left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_fl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fl', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure front right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_fr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_fr', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_front_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.6105682912393', + }) +# --- +# 
name: test_sensors[sensor.test_tire_pressure_front_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure front right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_front_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.6105682912393', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure rear left', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_rl', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rl', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_left-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear left', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.test_tire_pressure_rear_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Tire pressure rear right', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_tpms_pressure_rr', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_tpms_pressure_rr', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.test_tire_pressure_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_tire_pressure_rear_right-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'pressure', + 'friendly_name': 'Test Tire pressure rear right', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_tire_pressure_rear_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40.2479739314961', + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_traffic_delay', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Traffic delay', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'drive_state_active_route_traffic_minutes_delay', + 'unique_id': 'LRWXF7EK4KC700000-drive_state_active_route_traffic_minutes_delay', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_traffic_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_traffic_delay-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'Test Traffic delay', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.test_traffic_delay', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.test_usable_battery_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Usable battery level', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charge_state_usable_battery_level', + 'unique_id': 'LRWXF7EK4KC700000-charge_state_usable_battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_usable_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.test_usable_battery_level-statealt] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Test Usable battery level', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.test_usable_battery_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '77', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fault state code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_fault_state', + 'unique_id': '123456-abd-123-wall_connector_fault_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fault state code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_fault_state', + 'unique_id': '123456-bcd-234-wall_connector_fault_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_fault_state_code_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Fault state code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_fault_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-abd-123-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_power_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_power', + 'unique_id': '123456-bcd-234-wall_connector_power', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_power_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Wall Connector Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wall_connector_power_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.wall_connector_state_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-abd-123-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'State code', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wall_connector_state', + 'unique_id': '123456-bcd-234-wall_connector_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_state_code_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector State code', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_state_code_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': '123456-abd-123-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 
'sensor.wall_connector_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Vehicle', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vin', + 'unique_id': '123456-bcd-234-vin', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensors[sensor.wall_connector_vehicle_2-statealt] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Wall Connector Vehicle', + }), + 'context': , + 'entity_id': 'sensor.wall_connector_vehicle_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensors.py new file mode 100644 index 00000000000..ffbaac5e6d8 --- /dev/null +++ b/tests/components/tesla_fleet/test_binary_sensors.py @@ -0,0 +1,64 @@ +"""Test the Tesla Fleet binary sensor platform.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor_refresh( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data, + freezer: FrozenDateTimeFactory, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + + # Refresh + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_binary_sensor_offline( + hass: HomeAssistant, + mock_vehicle_data, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the binary sensor entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.BINARY_SENSOR]) + state = hass.states.get("binary_sensor.test_status") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py new file mode 100644 index 00000000000..bd1c7d7c2b8 --- /dev/null +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -0,0 +1,225 @@ +"""Test the Tesla Fleet config flow.""" + +from unittest.mock import patch +from urllib.parse import parse_qs, urlparse + +import pytest + +from homeassistant.components.tesla_fleet.const import ( + AUTHORIZE_URL, + CLIENT_ID, + DOMAIN, + SCOPES, + TOKEN_URL, +) +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +REDIRECT = "https://example.com/auth/external/callback" +UNIQUE_ID = "uid" + + +@pytest.fixture +async def access_token(hass: HomeAssistant) -> dict[str, str | list[str]]: + """Return a valid access token.""" + return config_entry_oauth2_flow._encode_jwt( + hass, + { + "sub": UNIQUE_ID, + "aud": [], + "scp": [ + "vehicle_device_data", + "vehicle_cmds", + "vehicle_charging_cmds", + "energy_device_data", + "energy_cmds", + "offline_access", + "openid", + ], + "ou_code": "NA", + }, + ) + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + 
{ + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + + assert result["type"] is FlowResultType.EXTERNAL_STEP + + assert result["url"].startswith(AUTHORIZE_URL) + parsed_url = urlparse(result["url"]) + parsed_query = parse_qs(parsed_url.query) + assert parsed_query["response_type"][0] == "code" + assert parsed_query["client_id"][0] == CLIENT_ID + assert parsed_query["redirect_uri"][0] == REDIRECT + assert parsed_query["state"][0] == state + assert parsed_query["scope"][0] == " ".join(SCOPES) + assert parsed_query["code_challenge"][0] is not None + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == UNIQUE_ID + assert "result" in result + assert result["result"].unique_id == UNIQUE_ID + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == access_token + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauthentication( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token, +) -> None: + """Test Tesla Fleet reauthentication.""" + old_entry = MockConfigEntry( + domain=DOMAIN, + unique_id=UNIQUE_ID, + version=1, + data={}, + ) + old_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + client = await hass_client_no_auth() + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth_account_mismatch( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token, +) -> None: + """Test Tesla Fleet reauthentication with different account.""" + old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) + old_entry.add_to_hass(hass) + + result = 
await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": old_entry.unique_id, + "entry_id": old_entry.entry_id, + }, + data=old_entry.data, + ) + + flows = hass.config_entries.flow.async_progress() + result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + client = await hass_client_no_auth() + await client.get(f"/auth/external/callback?code=abcd&state={state}") + + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_account_mismatch" diff --git a/tests/components/tesla_fleet/test_device_tracker.py b/tests/components/tesla_fleet/test_device_tracker.py new file mode 100644 index 00000000000..66a0c06de7f --- /dev/null +++ b/tests/components/tesla_fleet/test_device_tracker.py @@ -0,0 +1,37 @@ +"""Test the Tesla Fleet device tracker platform.""" + +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import VehicleOffline + +from homeassistant.const import STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, setup_platform + +from tests.common import MockConfigEntry + + +async def test_device_tracker( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the device tracker entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_device_tracker_offline( + hass: HomeAssistant, + mock_vehicle_data, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the device tracker entities are correct when offline.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.DEVICE_TRACKER]) + state = hass.states.get("device_tracker.test_location") + assert state.state == STATE_UNKNOWN diff --git a/tests/components/tesla_fleet/test_diagnostics.py b/tests/components/tesla_fleet/test_diagnostics.py new file mode 100644 index 00000000000..e0ef24097bb --- /dev/null +++ b/tests/components/tesla_fleet/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Test the Tesla Fleet Diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, +) -> None: + """Test diagnostics.""" + + await setup_platform(hass, normal_config_entry) + + diag = await get_diagnostics_for_config_entry( + hass, hass_client, normal_config_entry + ) + assert diag == snapshot diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py new file mode 100644 index 00000000000..20bb6c66906 --- /dev/null +++ b/tests/components/tesla_fleet/test_init.py @@ -0,0 +1,328 @@ +"""Test the Tesla Fleet init.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion +from tesla_fleet_api.exceptions import ( + InvalidToken, + LoginRequired, + OAuthExpired, + RateLimited, + TeslaFleetError, + VehicleOffline, +) + +from homeassistant.components.tesla_fleet.coordinator import ( + ENERGY_INTERVAL, + ENERGY_INTERVAL_SECONDS, + VEHICLE_INTERVAL, + VEHICLE_INTERVAL_SECONDS, + VEHICLE_WAIT, +) +from homeassistant.components.tesla_fleet.models import TeslaFleetData +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_platform +from .const import VEHICLE_ASLEEP, VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + +ERRORS = [ + (InvalidToken, ConfigEntryState.SETUP_ERROR), + (OAuthExpired, ConfigEntryState.SETUP_ERROR), + (LoginRequired, ConfigEntryState.SETUP_ERROR), + (TeslaFleetError, ConfigEntryState.SETUP_RETRY), +] + + +async def test_load_unload( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Test load and unload.""" + + await setup_platform(hass, normal_config_entry) + + assert normal_config_entry.state is ConfigEntryState.LOADED + assert isinstance(normal_config_entry.runtime_data, TeslaFleetData) + assert await hass.config_entries.async_unload(normal_config_entry.entry_id) + await hass.async_block_till_done() + assert normal_config_entry.state is ConfigEntryState.NOT_LOADED + assert not hasattr(normal_config_entry, "runtime_data") + + +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_init_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products, + side_effect, + state, +) -> None: + """Test init with errors.""" + + mock_products.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +# Test devices +async def test_devices( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test device registry.""" + await setup_platform(hass, normal_config_entry) + devices = dr.async_entries_for_config_entry( + device_registry, normal_config_entry.entry_id + ) + + for device in devices: + assert device == snapshot(name=f"{device.identifiers}") + + +# Vehicle Coordinator +async def test_vehicle_refresh_offline( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_state, + mock_vehicle_data, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh with an 
offline vehicle.""" + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is ConfigEntryState.LOADED + + mock_vehicle_state.assert_called_once() + mock_vehicle_data.assert_called_once() + mock_vehicle_state.reset_mock() + mock_vehicle_data.reset_mock() + + # Then the vehicle goes offline + mock_vehicle_data.side_effect = VehicleOffline + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_vehicle_state.assert_not_called() + mock_vehicle_data.assert_called_once() + mock_vehicle_data.reset_mock() + + # And stays offline + mock_vehicle_state.return_value = VEHICLE_ASLEEP + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_vehicle_state.assert_called_once() + mock_vehicle_data.assert_not_called() + + +@pytest.mark.parametrize(("side_effect"), ERRORS) +async def test_vehicle_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data: AsyncMock, + side_effect: TeslaFleetError, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh makes entity unavailable.""" + + await setup_platform(hass, normal_config_entry) + + mock_vehicle_data.side_effect = side_effect + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert (state := hass.states.get("sensor.test_battery_level")) + assert state.state == "unavailable" + + +async def test_vehicle_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + mock_vehicle_data.side_effect = RateLimited( + {"after": VEHICLE_INTERVAL_SECONDS + 10} + ) + await setup_platform(hass, normal_config_entry) + + assert (state := hass.states.get("sensor.test_battery_level")) + assert state.state == "unknown" + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_vehicle_data.call_count == 2 + + +async def test_vehicle_sleep( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_vehicle_data, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that the coordinator lets an idle vehicle sleep and resumes polling afterwards.""" + await setup_platform(hass, normal_config_entry) + assert mock_vehicle_data.call_count == 1 + + freezer.tick(VEHICLE_WAIT + VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Let vehicle sleep, no updates for 15 minutes + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # No polling, call_count should not increase + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # No polling, call_count should not increase + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 2 + + freezer.tick(VEHICLE_WAIT) + async_fire_time_changed(hass) + # Vehicle didn't sleep, go back to normal + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 3 + + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Regular polling + await hass.async_block_till_done() + assert 
mock_vehicle_data.call_count == 4 + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + # Vehicle active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 5 + + freezer.tick(VEHICLE_WAIT) + async_fire_time_changed(hass) + # Don't let sleep when active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 6 + + freezer.tick(VEHICLE_WAIT) + async_fire_time_changed(hass) + # Don't let sleep when active + await hass.async_block_till_done() + assert mock_vehicle_data.call_count == 7 + + +# Test Energy Live Coordinator +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_energy_live_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_live_status, + side_effect, + state, +) -> None: + """Test coordinator refresh with an error.""" + mock_live_status.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +# Test Energy Site Coordinator +@pytest.mark.parametrize(("side_effect", "state"), ERRORS) +async def test_energy_site_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_site_info, + side_effect, + state, +) -> None: + """Test coordinator refresh with an error.""" + mock_site_info.side_effect = side_effect + await setup_platform(hass, normal_config_entry) + assert normal_config_entry.state is state + + +async def test_energy_live_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_live_status, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + await setup_platform(hass, normal_config_entry) + + mock_live_status.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_live_status.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_live_status.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_live_status.call_count == 3 + + +async def test_energy_info_refresh_ratelimited( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_site_info, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh handles 429.""" + + await setup_platform(hass, normal_config_entry) + + mock_site_info.side_effect = RateLimited({"after": ENERGY_INTERVAL_SECONDS + 10}) + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_site_info.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Should not call for another 10 seconds + assert mock_site_info.call_count == 2 + + freezer.tick(ENERGY_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_site_info.call_count == 3 diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py new file mode 100644 index 00000000000..2133194e2a0 --- /dev/null +++ b/tests/components/tesla_fleet/test_sensor.py @@ -0,0 +1,41 @@ +"""Test the Tesla Fleet sensor platform.""" + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import 
SnapshotAssertion + +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import assert_entities, assert_entities_alt, setup_platform +from .const import VEHICLE_DATA_ALT + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + normal_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_vehicle_data, +) -> None: + """Tests that the sensor entities are correct.""" + + freezer.move_to("2024-01-01 00:00:00+00:00") + + await setup_platform(hass, normal_config_entry, [Platform.SENSOR]) + + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + # Coordinator refresh + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert_entities_alt(hass, normal_config_entry.entry_id, entity_registry, snapshot) diff --git a/tests/components/teslemetry/conftest.py b/tests/components/teslemetry/conftest.py index 410eaa62b69..03b9e2c6eb6 100644 --- a/tests/components/teslemetry/conftest.py +++ b/tests/components/teslemetry/conftest.py @@ -2,8 +2,9 @@ from __future__ import annotations +from collections.abc import Generator from copy import deepcopy -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest @@ -37,7 +38,7 @@ def mock_products(): @pytest.fixture(autouse=True) -def mock_vehicle_data(): +def mock_vehicle_data() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle_data method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle_data", @@ -57,7 +58,7 @@ def mock_wake_up(): @pytest.fixture(autouse=True) -def mock_vehicle(): +def mock_vehicle() -> Generator[AsyncMock]: """Mock Tesla Fleet API Vehicle Specific vehicle method.""" with patch( "homeassistant.components.teslemetry.VehicleSpecific.vehicle", diff --git a/tests/components/teslemetry/fixtures/products.json b/tests/components/teslemetry/fixtures/products.json index e1b76e4cefb..8da921a33f4 100644 --- a/tests/components/teslemetry/fixtures/products.json +++ b/tests/components/teslemetry/fixtures/products.json @@ -115,7 +115,17 @@ "features": { "rate_plan_manager_no_pricing_constraint": true } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } } ], - "count": 2 + "count": 3 } diff --git a/tests/components/teslemetry/fixtures/vehicle_data.json b/tests/components/teslemetry/fixtures/vehicle_data.json index 6c787df4897..3845ae48559 100644 --- a/tests/components/teslemetry/fixtures/vehicle_data.json +++ b/tests/components/teslemetry/fixtures/vehicle_data.json @@ -176,7 +176,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1705707520649, @@ -250,6 +250,8 @@ "min_limit_mph": 50, "pin_code_set": true }, + "sun_roof_state": "open", + "vehicle_state_sun_roof_percent_open": 20, "timestamp": 1705707520649, "tpms_hard_warning_fl": false, "tpms_hard_warning_fr": false, diff --git 
a/tests/components/teslemetry/snapshots/test_cover.ambr b/tests/components/teslemetry/snapshots/test_cover.ambr index 7689a08a373..7ffb9c4a1f9 100644 --- a/tests/components/teslemetry/snapshots/test_cover.ambr +++ b/tests/components/teslemetry/snapshots/test_cover.ambr @@ -95,6 +95,54 @@ 'state': 'closed', }) # --- +# name: test_cover[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_cover[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -287,6 +335,54 @@ 'state': 'open', }) # --- +# name: test_cover_alt[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_alt[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_cover_alt[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -479,6 +575,54 @@ 'state': 'closed', }) # --- +# name: test_cover_noscope[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 
'LRWXF7EK4KC700000-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_cover_noscope[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_cover_noscope[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/teslemetry/snapshots/test_diagnostics.ambr b/tests/components/teslemetry/snapshots/test_diagnostics.ambr index 4a942daa508..11f8a91c1aa 100644 --- a/tests/components/teslemetry/snapshots/test_diagnostics.ambr +++ b/tests/components/teslemetry/snapshots/test_diagnostics.ambr @@ -337,7 +337,7 @@ 'vehicle_config_roof_color': 'RoofColorGlass', 'vehicle_config_seat_type': None, 'vehicle_config_spoiler_type': 'None', - 'vehicle_config_sun_roof_installed': None, + 'vehicle_config_sun_roof_installed': True, 'vehicle_config_supports_qr_pairing': False, 'vehicle_config_third_row_seats': 'None', 'vehicle_config_timestamp': 1705707520649, @@ -402,6 +402,7 @@ 'vehicle_state_speed_limit_mode_max_limit_mph': 120, 'vehicle_state_speed_limit_mode_min_limit_mph': 50, 'vehicle_state_speed_limit_mode_pin_code_set': True, + 'vehicle_state_sun_roof_state': 'open', 'vehicle_state_timestamp': 1705707520649, 'vehicle_state_tpms_hard_warning_fl': False, 'vehicle_state_tpms_hard_warning_fr': False, @@ -426,6 +427,7 @@ 'vehicle_state_vehicle_name': 'Test', 'vehicle_state_vehicle_self_test_progress': 0, 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_vehicle_state_sun_roof_percent_open': 20, 'vehicle_state_webcam_available': True, 'vin': '**REDACTED**', }), diff --git a/tests/components/teslemetry/snapshots/test_init.ambr b/tests/components/teslemetry/snapshots/test_init.ambr index e5dd23ada6e..e07f075b7d8 100644 --- a/tests/components/teslemetry/snapshots/test_init.ambr +++ b/tests/components/teslemetry/snapshots/test_init.ambr @@ -21,6 +21,7 @@ }), 'manufacturer': 'Tesla', 'model': 'Powerwall 2, Tesla Backup Gateway 2', + 'model_id': None, 'name': 'Energy Site', 'name_by_user': None, 'primary_config_entry': , @@ -52,6 +53,7 @@ }), 'manufacturer': 'Tesla', 'model': 'Model X', + 'model_id': None, 'name': 'Test', 'name_by_user': None, 'primary_config_entry': , @@ -83,6 +85,7 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', + 'model_id': None, 'name': 'Wall Connector', 'name_by_user': None, 'primary_config_entry': , @@ -114,6 +117,7 @@ }), 'manufacturer': 'Tesla', 'model': 'Gen 3 Wall Connector', + 'model_id': None, 'name': 'Wall Connector', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 250413396c1..31a39f1f21a 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -199,7 +199,7 @@ async def test_climate( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, blocking=True, ) diff --git a/tests/components/teslemetry/test_cover.py b/tests/components/teslemetry/test_cover.py index 5f99a5d9c79..8d4493ab25f 100644 --- a/tests/components/teslemetry/test_cover.py +++ b/tests/components/teslemetry/test_cover.py @@ -2,6 +2,7 @@ from unittest.mock 
import patch +import pytest from syrupy import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline @@ -9,6 +10,7 @@ from homeassistant.components.cover import ( DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, + SERVICE_STOP_COVER, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -24,6 +26,7 @@ from . import assert_entities, setup_platform from .const import COMMAND_OK, METADATA_NOSCOPE, VEHICLE_DATA_ALT +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -35,19 +38,21 @@ async def test_cover( assert_entities(hass, entry.entry_id, entity_registry, snapshot) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_alt( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, mock_vehicle_data, ) -> None: - """Tests that the cover entities are correct without scopes.""" + """Tests that the cover entities are correct with alternate values.""" mock_vehicle_data.return_value = VEHICLE_DATA_ALT entry = await setup_platform(hass, [Platform.COVER]) assert_entities(hass, entry.entry_id, entity_registry, snapshot) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_noscope( hass: HomeAssistant, snapshot: SnapshotAssertion, @@ -73,6 +78,7 @@ async def test_cover_offline( assert state.state == STATE_UNKNOWN +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_cover_services( hass: HomeAssistant, ) -> None: @@ -186,3 +192,44 @@ async def test_cover_services( state = hass.states.get(entity_id) assert state assert state.state is STATE_CLOSED + + # Sunroof + entity_id = "cover.test_sunroof" + with patch( + "homeassistant.components.teslemetry.VehicleSpecific.sun_roof_control", + return_value=COMMAND_OK, + ) as call: + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state is STATE_OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state is STATE_OPEN + + call.reset_mock() + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + call.assert_called_once() + state = hass.states.get(entity_id) + assert state + assert state.state is STATE_CLOSED diff --git a/tests/components/teslemetry/test_init.py b/tests/components/teslemetry/test_init.py index 31b4202b521..5520a5549bd 100644 --- a/tests/components/teslemetry/test_init.py +++ b/tests/components/teslemetry/test_init.py @@ -1,5 +1,7 @@ """Test the Teslemetry init.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -21,7 +23,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from . 
import setup_platform -from .const import VEHICLE_DATA_ALT +from .const import VEHICLE_DATA_ALT, WAKE_UP_ASLEEP from tests.common import async_fire_time_changed @@ -68,6 +70,21 @@ async def test_devices( # Vehicle Coordinator +async def test_vehicle_refresh_asleep( + hass: HomeAssistant, + mock_vehicle: AsyncMock, + mock_vehicle_data: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator refresh with an error.""" + + mock_vehicle.return_value = WAKE_UP_ASLEEP + entry = await setup_platform(hass, [Platform.CLIMATE]) + assert entry.state is ConfigEntryState.LOADED + mock_vehicle.assert_called_once() + mock_vehicle_data.assert_not_called() + + async def test_vehicle_refresh_offline( hass: HomeAssistant, mock_vehicle_data, freezer: FrozenDateTimeFactory ) -> None: diff --git a/tests/components/tessie/fixtures/online.json b/tests/components/tessie/fixtures/online.json index ed49b4bfd75..38b904cdffb 100644 --- a/tests/components/tessie/fixtures/online.json +++ b/tests/components/tessie/fixtures/online.json @@ -98,6 +98,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -157,7 +159,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -173,7 +175,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/fixtures/products.json b/tests/components/tessie/fixtures/products.json index e1b76e4cefb..8da921a33f4 100644 --- a/tests/components/tessie/fixtures/products.json +++ b/tests/components/tessie/fixtures/products.json @@ -115,7 +115,17 @@ "features": { "rate_plan_manager_no_pricing_constraint": true } + }, + { + "energy_site_id": 98765, + "components": { + "battery": false, + "solar": false, + "grid": false, + "load_meter": false, + "market_type": "residential" + } } ], - "count": 2 + "count": 3 } diff --git a/tests/components/tessie/fixtures/vehicles.json b/tests/components/tessie/fixtures/vehicles.json index 359e23f9cdd..622b31bae69 100644 --- a/tests/components/tessie/fixtures/vehicles.json +++ b/tests/components/tessie/fixtures/vehicles.json @@ -111,6 +111,8 @@ "passenger_temp_setting": 22.5, "remote_heater_control_enabled": false, "right_temp_direction": 234, + "seat_fan_front_left": 0, + "seat_fan_front_right": 0, "seat_heater_left": 0, "seat_heater_rear_center": 0, "seat_heater_rear_left": 0, @@ -174,7 +176,7 @@ "exterior_trim_override": "", "has_air_suspension": false, "has_ludicrous_mode": false, - "has_seat_cooling": false, + "has_seat_cooling": true, "headlamp_type": "Global", "interior_trim_type": "White2", "key_version": 2, @@ -190,7 +192,7 @@ "roof_color": "RoofColorGlass", "seat_type": null, "spoiler_type": "None", - "sun_roof_installed": null, + "sun_roof_installed": true, "supports_qr_pairing": false, "third_row_seats": "None", "timestamp": 1701139037461, diff --git a/tests/components/tessie/snapshots/test_binary_sensors.ambr b/tests/components/tessie/snapshots/test_binary_sensors.ambr index 7bc191de6ed..e8912bb0e7f 100644 --- a/tests/components/tessie/snapshots/test_binary_sensors.ambr +++ 
b/tests/components/tessie/snapshots/test_binary_sensors.ambr @@ -1,4 +1,142 @@ # serializer version: 1 +# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Backup capable', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_capable', + 'unique_id': '123456-backup_capable', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_backup_capable-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Backup capable', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_backup_capable', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services active', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'grid_services_active', + 'unique_id': '123456-grid_services_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services active', + }), + 'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Grid services enabled', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_grid_services_enabled', + 'unique_id': '123456-components_grid_services_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_binary_sensors[binary_sensor.energy_site_grid_services_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Grid services enabled', + }), + 
'context': , + 'entity_id': 'binary_sensor.energy_site_grid_services_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_binary_sensors[binary_sensor.test_auto_seat_climate_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index ff04c528244..8c8c9a48c11 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,6 +95,87 @@ 'state': 'closed', }) # --- +# name: test_covers[cover.test_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_sunroof-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_sunroof', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sunroof', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'vehicle_state_sun_roof_state', + 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_sunroof-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'window', + 'friendly_name': 'Test Sunroof', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_sunroof', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- # name: test_covers[cover.test_trunk-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_diagnostics.ambr b/tests/components/tessie/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..8eef7cbd549 --- /dev/null +++ b/tests/components/tessie/snapshots/test_diagnostics.ambr @@ -0,0 +1,428 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'energysites': list([ + dict({ + 'info': dict({ + 'backup_reserve_percent': 0, + 'battery_count': 2, + 'components_backup': True, + 'components_backup_time_remaining_enabled': True, + 'components_batteries': list([ + dict({ + 'device_id': 'battery-1-id', + 'din': 'battery-1-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-10-B', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + dict({ + 'device_id': 'battery-2-id', + 'din': 'battery-2-din', + 'nameplate_energy': 13500, + 'nameplate_max_charge_power': 5000, + 
'nameplate_max_discharge_power': 5000, + 'part_name': 'Powerwall 2', + 'part_number': '3012170-05-C', + 'part_type': 2, + 'serial_number': '**REDACTED**', + }), + ]), + 'components_battery': True, + 'components_battery_solar_offset_view_enabled': True, + 'components_battery_type': 'ac_powerwall', + 'components_car_charging_data_supported': False, + 'components_configurable': True, + 'components_customer_preferred_export_rule': 'pv_only', + 'components_disallow_charge_from_grid_with_solar_installed': True, + 'components_energy_service_self_scheduling_enabled': True, + 'components_energy_value_header': 'Energy Value', + 'components_energy_value_subheader': 'Estimated Value', + 'components_flex_energy_request_capable': False, + 'components_gateway': 'teg', + 'components_gateways': list([ + dict({ + 'device_id': 'gateway-id', + 'din': 'gateway-din', + 'firmware_version': '24.4.0 0fe780c9', + 'is_active': True, + 'part_name': 'Tesla Backup Gateway 2', + 'part_number': '1152100-14-J', + 'part_type': 10, + 'serial_number': '**REDACTED**', + 'site_id': '1234-abcd', + 'updated_datetime': '2024-05-14T00:00:00.000Z', + }), + ]), + 'components_grid': True, + 'components_grid_services_enabled': False, + 'components_load_meter': True, + 'components_net_meter_mode': 'battery_ok', + 'components_off_grid_vehicle_charging_reserve_supported': True, + 'components_set_islanding_mode_enabled': True, + 'components_show_grid_import_battery_source_cards': True, + 'components_solar': True, + 'components_solar_type': 'pv_panel', + 'components_solar_value_enabled': True, + 'components_storm_mode_capable': True, + 'components_system_alerts_enabled': True, + 'components_tou_capable': True, + 'components_vehicle_charging_performance_view_enabled': False, + 'components_vehicle_charging_solar_offset_view_enabled': False, + 'components_wall_connectors': list([ + dict({ + 'device_id': '123abc', + 'din': 'abc123', + 'is_active': True, + }), + dict({ + 'device_id': '234bcd', + 'din': 'bcd234', + 'is_active': True, + }), + ]), + 'components_wifi_commissioning_enabled': True, + 'default_real_mode': 'self_consumption', + 'id': '1233-abcd', + 'installation_date': '**REDACTED**', + 'installation_time_zone': '', + 'max_site_meter_power_ac': 1000000000, + 'min_site_meter_power_ac': -1000000000, + 'nameplate_energy': 40500, + 'nameplate_power': 15000, + 'site_name': 'Site', + 'tou_settings_optimization_strategy': 'economics', + 'tou_settings_schedule': list([ + dict({ + 'end_seconds': 3600, + 'start_seconds': 0, + 'target': 'off_peak', + 'week_days': list([ + 1, + 0, + ]), + }), + dict({ + 'end_seconds': 0, + 'start_seconds': 3600, + 'target': 'peak', + 'week_days': list([ + 1, + 0, + ]), + }), + ]), + 'user_settings_breaker_alert_enabled': False, + 'user_settings_go_off_grid_test_banner_enabled': False, + 'user_settings_powerwall_onboarding_settings_set': True, + 'user_settings_powerwall_tesla_electric_interested_in': False, + 'user_settings_storm_mode_enabled': True, + 'user_settings_sync_grid_alert_enabled': True, + 'user_settings_vpp_tour_enabled': True, + 'version': '23.44.0 eb113390', + 'vpp_backup_reserve_percent': 0, + }), + 'live': dict({ + 'backup_capable': True, + 'battery_power': 5060, + 'energy_left': 38896.47368421053, + 'generator_power': 0, + 'grid_power': 0, + 'grid_services_active': False, + 'grid_services_power': 0, + 'grid_status': 'Active', + 'island_status': 'on_grid', + 'load_power': 6245, + 'percentage_charged': 95.50537403739663, + 'solar_power': 1185, + 'storm_mode_active': False, + 'timestamp': 
'2024-01-01T00:00:00+00:00', + 'total_pack_energy': 40727, + 'wall_connectors': dict({ + 'abd-123': dict({ + 'din': 'abd-123', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + 'bcd-234': dict({ + 'din': 'bcd-234', + 'wall_connector_fault_state': 2, + 'wall_connector_power': 0, + 'wall_connector_state': 2, + }), + }), + }), + }), + ]), + 'vehicles': list([ + dict({ + 'data': dict({ + 'access_type': 'OWNER', + 'api_version': 67, + 'backseat_token': None, + 'backseat_token_updated_at': None, + 'ble_autopair_enrolled': False, + 'calendar_enabled': True, + 'charge_state_battery_heater_on': False, + 'charge_state_battery_level': 75, + 'charge_state_battery_range': 263.68, + 'charge_state_charge_amps': 32, + 'charge_state_charge_current_request': 32, + 'charge_state_charge_current_request_max': 32, + 'charge_state_charge_enable_request': True, + 'charge_state_charge_energy_added': 18.47, + 'charge_state_charge_limit_soc': 80, + 'charge_state_charge_limit_soc_max': 100, + 'charge_state_charge_limit_soc_min': 50, + 'charge_state_charge_limit_soc_std': 80, + 'charge_state_charge_miles_added_ideal': 84, + 'charge_state_charge_miles_added_rated': 84, + 'charge_state_charge_port_cold_weather_mode': False, + 'charge_state_charge_port_color': '', + 'charge_state_charge_port_door_open': True, + 'charge_state_charge_port_latch': 'Engaged', + 'charge_state_charge_rate': 30.6, + 'charge_state_charger_actual_current': 32, + 'charge_state_charger_phases': 1, + 'charge_state_charger_pilot_current': 32, + 'charge_state_charger_power': 7, + 'charge_state_charger_voltage': 224, + 'charge_state_charging_state': 'Charging', + 'charge_state_conn_charge_cable': 'IEC', + 'charge_state_est_battery_range': 324.73, + 'charge_state_fast_charger_brand': '', + 'charge_state_fast_charger_present': False, + 'charge_state_fast_charger_type': 'ACSingleWireCAN', + 'charge_state_ideal_battery_range': 263.68, + 'charge_state_max_range_charge_counter': 0, + 'charge_state_minutes_to_full_charge': 0, + 'charge_state_not_enough_power_to_heat': None, + 'charge_state_off_peak_charging_enabled': False, + 'charge_state_off_peak_charging_times': 'all_week', + 'charge_state_off_peak_hours_end_time': 900, + 'charge_state_preconditioning_enabled': False, + 'charge_state_preconditioning_times': 'all_week', + 'charge_state_scheduled_charging_mode': 'StartAt', + 'charge_state_scheduled_charging_pending': False, + 'charge_state_scheduled_charging_start_time': 1701216000, + 'charge_state_scheduled_charging_start_time_app': 600, + 'charge_state_scheduled_charging_start_time_minutes': 600, + 'charge_state_scheduled_departure_time': 1694899800, + 'charge_state_scheduled_departure_time_minutes': 450, + 'charge_state_supercharger_session_trip_planner': False, + 'charge_state_time_to_full_charge': 0, + 'charge_state_timestamp': 1701139037461, + 'charge_state_trip_charging': False, + 'charge_state_usable_battery_level': 75, + 'charge_state_user_charge_enable_request': None, + 'climate_state_allow_cabin_overheat_protection': True, + 'climate_state_auto_seat_climate_left': True, + 'climate_state_auto_seat_climate_right': True, + 'climate_state_auto_steering_wheel_heat': True, + 'climate_state_battery_heater': False, + 'climate_state_battery_heater_no_power': None, + 'climate_state_cabin_overheat_protection': 'On', + 'climate_state_cabin_overheat_protection_actively_cooling': False, + 'climate_state_climate_keeper_mode': 'off', + 'climate_state_cop_activation_temperature': 'High', + 
'climate_state_defrost_mode': 0, + 'climate_state_driver_temp_setting': 22.5, + 'climate_state_fan_status': 0, + 'climate_state_hvac_auto_request': 'On', + 'climate_state_inside_temp': 30.4, + 'climate_state_is_auto_conditioning_on': False, + 'climate_state_is_climate_on': False, + 'climate_state_is_front_defroster_on': False, + 'climate_state_is_preconditioning': False, + 'climate_state_is_rear_defroster_on': False, + 'climate_state_left_temp_direction': 234, + 'climate_state_max_avail_temp': 28, + 'climate_state_min_avail_temp': 15, + 'climate_state_outside_temp': 30.5, + 'climate_state_passenger_temp_setting': 22.5, + 'climate_state_remote_heater_control_enabled': False, + 'climate_state_right_temp_direction': 234, + 'climate_state_seat_fan_front_left': 0, + 'climate_state_seat_fan_front_right': 0, + 'climate_state_seat_heater_left': 0, + 'climate_state_seat_heater_rear_center': 0, + 'climate_state_seat_heater_rear_left': 0, + 'climate_state_seat_heater_rear_right': 0, + 'climate_state_seat_heater_right': 0, + 'climate_state_side_mirror_heaters': False, + 'climate_state_steering_wheel_heat_level': 0, + 'climate_state_steering_wheel_heater': False, + 'climate_state_supports_fan_only_cabin_overheat_protection': True, + 'climate_state_timestamp': 1701139037461, + 'climate_state_wiper_blade_heater': False, + 'color': None, + 'display_name': 'Test', + 'drive_state_active_route_destination': 'Giga Texas', + 'drive_state_active_route_energy_at_arrival': 65, + 'drive_state_active_route_latitude': '**REDACTED**', + 'drive_state_active_route_longitude': '**REDACTED**', + 'drive_state_active_route_miles_to_arrival': 46.707353, + 'drive_state_active_route_minutes_to_arrival': 59.2, + 'drive_state_active_route_traffic_minutes_delay': 0, + 'drive_state_gps_as_of': 1701129612, + 'drive_state_heading': 185, + 'drive_state_latitude': '**REDACTED**', + 'drive_state_longitude': '**REDACTED**', + 'drive_state_native_latitude': '**REDACTED**', + 'drive_state_native_location_supported': 1, + 'drive_state_native_longitude': '**REDACTED**', + 'drive_state_native_type': 'wgs', + 'drive_state_power': -7, + 'drive_state_shift_state': None, + 'drive_state_speed': None, + 'drive_state_timestamp': 1701139037461, + 'granular_access_hide_private': False, + 'gui_settings_gui_24_hour_time': False, + 'gui_settings_gui_charge_rate_units': 'kW', + 'gui_settings_gui_distance_units': 'km/hr', + 'gui_settings_gui_range_display': 'Rated', + 'gui_settings_gui_temperature_units': 'C', + 'gui_settings_gui_tirepressure_units': 'Psi', + 'gui_settings_show_range_units': False, + 'gui_settings_timestamp': 1701139037461, + 'id': '**REDACTED**', + 'id_s': '**REDACTED**', + 'in_service': False, + 'state': 'online', + 'tokens': '**REDACTED**', + 'user_id': '**REDACTED**', + 'vehicle_config_aux_park_lamps': 'Eu', + 'vehicle_config_badge_version': 1, + 'vehicle_config_can_accept_navigation_requests': True, + 'vehicle_config_can_actuate_trunks': True, + 'vehicle_config_car_special_type': 'base', + 'vehicle_config_car_type': 'model3', + 'vehicle_config_charge_port_type': 'CCS', + 'vehicle_config_cop_user_set_temp_supported': False, + 'vehicle_config_dashcam_clip_save_supported': True, + 'vehicle_config_default_charge_to_max': False, + 'vehicle_config_driver_assist': 'TeslaAP3', + 'vehicle_config_ece_restrictions': False, + 'vehicle_config_efficiency_package': 'M32021', + 'vehicle_config_eu_vehicle': True, + 'vehicle_config_exterior_color': 'DeepBlue', + 'vehicle_config_exterior_trim': 'Black', + 'vehicle_config_exterior_trim_override': '', + 
'vehicle_config_has_air_suspension': False, + 'vehicle_config_has_ludicrous_mode': False, + 'vehicle_config_has_seat_cooling': True, + 'vehicle_config_headlamp_type': 'Global', + 'vehicle_config_interior_trim_type': 'White2', + 'vehicle_config_key_version': 2, + 'vehicle_config_motorized_charge_port': True, + 'vehicle_config_paint_color_override': '0,9,25,0.7,0.04', + 'vehicle_config_performance_package': 'Base', + 'vehicle_config_plg': True, + 'vehicle_config_pws': False, + 'vehicle_config_rear_drive_unit': 'PM216MOSFET', + 'vehicle_config_rear_seat_heaters': 1, + 'vehicle_config_rear_seat_type': 0, + 'vehicle_config_rhd': True, + 'vehicle_config_roof_color': 'RoofColorGlass', + 'vehicle_config_seat_type': None, + 'vehicle_config_spoiler_type': 'None', + 'vehicle_config_sun_roof_installed': True, + 'vehicle_config_supports_qr_pairing': False, + 'vehicle_config_third_row_seats': 'None', + 'vehicle_config_timestamp': 1701139037461, + 'vehicle_config_trim_badging': '74d', + 'vehicle_config_use_range_badging': True, + 'vehicle_config_utc_offset': 36000, + 'vehicle_config_webcam_selfie_supported': True, + 'vehicle_config_webcam_supported': True, + 'vehicle_config_wheel_type': 'Pinwheel18CapKit', + 'vehicle_id': '**REDACTED**', + 'vehicle_state_api_version': 67, + 'vehicle_state_autopark_state_v2': 'unavailable', + 'vehicle_state_calendar_supported': True, + 'vehicle_state_car_version': '2023.38.6 c1f85ddb415f', + 'vehicle_state_center_display_state': 0, + 'vehicle_state_dashcam_clip_save_available': True, + 'vehicle_state_dashcam_state': 'Recording', + 'vehicle_state_df': 0, + 'vehicle_state_dr': 0, + 'vehicle_state_fd_window': 0, + 'vehicle_state_feature_bitmask': 'fbdffbff,7f', + 'vehicle_state_fp_window': 0, + 'vehicle_state_ft': 0, + 'vehicle_state_is_user_present': False, + 'vehicle_state_locked': True, + 'vehicle_state_media_info_audio_volume': 2.3333, + 'vehicle_state_media_info_audio_volume_increment': 0.333333, + 'vehicle_state_media_info_audio_volume_max': 10.333333, + 'vehicle_state_media_info_media_playback_status': 'Stopped', + 'vehicle_state_media_info_now_playing_album': '', + 'vehicle_state_media_info_now_playing_artist': '', + 'vehicle_state_media_info_now_playing_duration': 0, + 'vehicle_state_media_info_now_playing_elapsed': 0, + 'vehicle_state_media_info_now_playing_source': '', + 'vehicle_state_media_info_now_playing_station': '', + 'vehicle_state_media_info_now_playing_title': '', + 'vehicle_state_media_state_remote_control_enabled': False, + 'vehicle_state_notifications_supported': True, + 'vehicle_state_odometer': 5454.495383, + 'vehicle_state_parsed_calendar_supported': True, + 'vehicle_state_pf': 0, + 'vehicle_state_pr': 0, + 'vehicle_state_rd_window': 0, + 'vehicle_state_remote_start': False, + 'vehicle_state_remote_start_enabled': True, + 'vehicle_state_remote_start_supported': True, + 'vehicle_state_rp_window': 0, + 'vehicle_state_rt': 0, + 'vehicle_state_santa_mode': 0, + 'vehicle_state_sentry_mode': False, + 'vehicle_state_sentry_mode_available': True, + 'vehicle_state_service_mode': False, + 'vehicle_state_service_mode_plus': False, + 'vehicle_state_software_update_download_perc': 100, + 'vehicle_state_software_update_expected_duration_sec': 2700, + 'vehicle_state_software_update_install_perc': 1, + 'vehicle_state_software_update_status': 'available', + 'vehicle_state_software_update_version': '2023.44.30.4', + 'vehicle_state_speed_limit_mode_active': False, + 'vehicle_state_speed_limit_mode_current_limit_mph': 74.564543, + 
'vehicle_state_speed_limit_mode_max_limit_mph': 120, + 'vehicle_state_speed_limit_mode_min_limit_mph': 50, + 'vehicle_state_speed_limit_mode_pin_code_set': True, + 'vehicle_state_timestamp': 1701139037461, + 'vehicle_state_tpms_hard_warning_fl': False, + 'vehicle_state_tpms_hard_warning_fr': False, + 'vehicle_state_tpms_hard_warning_rl': False, + 'vehicle_state_tpms_hard_warning_rr': False, + 'vehicle_state_tpms_last_seen_pressure_time_fl': 1701062077, + 'vehicle_state_tpms_last_seen_pressure_time_fr': 1701062047, + 'vehicle_state_tpms_last_seen_pressure_time_rl': 1701062077, + 'vehicle_state_tpms_last_seen_pressure_time_rr': 1701062047, + 'vehicle_state_tpms_pressure_fl': 2.975, + 'vehicle_state_tpms_pressure_fr': 2.975, + 'vehicle_state_tpms_pressure_rl': 2.95, + 'vehicle_state_tpms_pressure_rr': 2.95, + 'vehicle_state_tpms_rcp_front_value': 2.9, + 'vehicle_state_tpms_rcp_rear_value': 2.9, + 'vehicle_state_tpms_soft_warning_fl': False, + 'vehicle_state_tpms_soft_warning_fr': False, + 'vehicle_state_tpms_soft_warning_rl': False, + 'vehicle_state_tpms_soft_warning_rr': False, + 'vehicle_state_valet_mode': False, + 'vehicle_state_valet_pin_needed': False, + 'vehicle_state_vehicle_name': 'Test', + 'vehicle_state_vehicle_self_test_progress': 0, + 'vehicle_state_vehicle_self_test_requested': False, + 'vehicle_state_webcam_available': True, + 'vin': '**REDACTED**', + }), + }), + ]), + }) +# --- diff --git a/tests/components/tessie/snapshots/test_number.ambr b/tests/components/tessie/snapshots/test_number.ambr index c91fb74adeb..6e641bdf5b7 100644 --- a/tests/components/tessie/snapshots/test_number.ambr +++ b/tests/components/tessie/snapshots/test_number.ambr @@ -1,4 +1,120 @@ # serializer version: 1 +# name: test_numbers[number.energy_site_backup_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_backup_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Backup reserve', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'backup_reserve_percent', + 'unique_id': '123456-backup_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_numbers[number.energy_site_backup_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Backup reserve', + 'icon': 'mdi:battery-alert', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_backup_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_numbers[number.energy_site_off_grid_reserve-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.energy_site_off_grid_reserve', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-unknown', + 'original_name': 'Off grid reserve', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_vehicle_charging_reserve_percent', + 'unique_id': '123456-off_grid_vehicle_charging_reserve_percent', + 'unit_of_measurement': '%', + }) +# --- +# name: test_numbers[number.energy_site_off_grid_reserve-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Energy Site Off grid reserve', + 'icon': 'mdi:battery-unknown', + 'max': 100, + 'min': 0, + 'mode': , + 'step': 1, + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'number.energy_site_off_grid_reserve', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_numbers[number.test_charge_current-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_select.ambr b/tests/components/tessie/snapshots/test_select.ambr index fc076aabf14..acc1946aab5 100644 --- a/tests/components/tessie/snapshots/test_select.ambr +++ b/tests/components/tessie/snapshots/test_select.ambr @@ -1,4 +1,236 @@ # serializer version: 1 +# name: test_select[select.energy_site_allow_export-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_allow_export', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow export', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_customer_preferred_export_rule', + 'unique_id': '123456-components_customer_preferred_export_rule', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_allow_export-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Allow export', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_allow_export', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'pv_only', + }) +# --- +# name: test_select[select.energy_site_operation_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.energy_site_operation_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Operation mode', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'default_real_mode', + 'unique_id': '123456-default_real_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.energy_site_operation_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Operation 
mode', + 'options': list([ + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.energy_site_operation_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'self_consumption', + }) +# --- +# name: test_select[select.test_seat_cooler_left-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_left', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler left', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_left', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_left', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_left-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler left', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_left', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_select[select.test_seat_cooler_right-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': None, + 'entity_id': 'select.test_seat_cooler_right', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Seat cooler right', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_seat_fan_front_right', + 'unique_id': 'VINVINVIN-climate_state_seat_fan_front_right', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[select.test_seat_cooler_right-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Test Seat cooler right', + 'options': list([ + , + , + , + , + ]), + }), + 'context': , + 'entity_id': 'select.test_seat_cooler_right', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_select[select.test_seat_heater_left-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_switch.ambr b/tests/components/tessie/snapshots/test_switch.ambr index db06e028198..3b7a3623de8 100644 --- a/tests/components/tessie/snapshots/test_switch.ambr +++ b/tests/components/tessie/snapshots/test_switch.ambr @@ -1,4 +1,96 @@ # serializer version: 1 +# name: test_switches[switch.energy_site_allow_charging_from_grid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Allow charging from grid', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'components_disallow_charge_from_grid_with_solar_installed', + 'unique_id': '123456-components_disallow_charge_from_grid_with_solar_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.energy_site_allow_charging_from_grid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Allow charging from grid', + }), + 'context': , + 'entity_id': 'switch.energy_site_allow_charging_from_grid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch.energy_site_storm_watch-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.energy_site_storm_watch', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Storm watch', + 'platform': 'tessie', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'user_settings_storm_mode_enabled', + 'unique_id': '123456-user_settings_storm_mode_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.energy_site_storm_watch-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Energy Site Storm watch', + }), + 'context': , + 'entity_id': 'switch.energy_site_storm_watch', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_switches[switch.test_charge-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index f3dc98e6e18..043086971fa 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -67,6 +67,33 @@ async def test_form( assert result2["data"] == TEST_CONFIG +async def test_abort( + hass: HomeAssistant, + mock_config_flow_get_state_of_all_vehicles, + mock_async_setup_entry, +) -> None: + """Test a duplicate entry aborts.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + data=TEST_CONFIG, + ) + mock_entry.add_to_hass(hass) + + result1 = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + result2 = await hass.config_entries.flow.async_configure( + result1["flow_id"], + TEST_CONFIG, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + @pytest.mark.parametrize( ("side_effect", "error"), [ diff --git a/tests/components/tessie/test_cover.py b/tests/components/tessie/test_cover.py index b731add10f8..be4dda3ec7b 100644 --- a/tests/components/tessie/test_cover.py +++ b/tests/components/tessie/test_cover.py @@ -42,6 +42,7 @@ async def test_covers( ("cover.test_charge_port_door", "open_unlock_charge_port", "close_charge_port"), ("cover.test_frunk", "open_front_trunk", False), ("cover.test_trunk", "open_close_rear_trunk", "open_close_rear_trunk"), + ("cover.test_sunroof", "vent_sunroof", "close_sunroof"), ): 
# Test open windows if openfunc: diff --git a/tests/components/tessie/test_diagnostics.py b/tests/components/tessie/test_diagnostics.py new file mode 100644 index 00000000000..5f60c1a06ca --- /dev/null +++ b/tests/components/tessie/test_diagnostics.py @@ -0,0 +1,23 @@ +"""Test the Tessie Diagnostics.""" + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .common import setup_platform + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + entry = await setup_platform(hass) + + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + assert diag == snapshot diff --git a/tests/components/tessie/test_number.py b/tests/components/tessie/test_number.py index 8a3d1a649c7..0fb13779183 100644 --- a/tests/components/tessie/test_number.py +++ b/tests/components/tessie/test_number.py @@ -4,12 +4,16 @@ from unittest.mock import patch from syrupy import SnapshotAssertion -from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import assert_entities, setup_platform +from .common import TEST_RESPONSE, assert_entities, setup_platform async def test_numbers( @@ -29,7 +33,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 16}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 16}, blocking=True, ) mock_set_charging_amps.assert_called_once() @@ -42,7 +46,7 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 80}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 80}, blocking=True, ) mock_set_charge_limit.assert_called_once() @@ -55,8 +59,41 @@ async def test_numbers( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: [entity_id], "value": 60}, + {ATTR_ENTITY_ID: [entity_id], ATTR_VALUE: 60}, blocking=True, ) mock_set_speed_limit.assert_called_once() assert hass.states.get(entity_id).state == "60.0" + + entity_id = "number.energy_site_backup_reserve" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.backup", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: entity_id, + ATTR_VALUE: 80, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "80" + call.assert_called_once() + + entity_id = "number.energy_site_off_grid_reserve" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.off_grid_vehicle_charging_reserve", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 88}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == "88" + call.assert_called_once() diff --git a/tests/components/tessie/test_select.py b/tests/components/tessie/test_select.py index f9526bf0a47..c78923fbf5b 100644 --- a/tests/components/tessie/test_select.py +++ 
b/tests/components/tessie/test_select.py @@ -4,12 +4,17 @@ from unittest.mock import patch import pytest from syrupy import SnapshotAssertion +from tesla_fleet_api.const import EnergyExportMode, EnergyOperationMode +from tesla_fleet_api.exceptions import UnsupportedVehicle from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) -from homeassistant.components.tessie.const import TessieSeatHeaterOptions +from homeassistant.components.tessie.const import ( + TessieSeatCoolerOptions, + TessieSeatHeaterOptions, +) from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -27,9 +32,8 @@ async def test_select( assert_entities(hass, entry.entry_id, entity_registry, snapshot) - entity_id = "select.test_seat_heater_left" - # Test changing select + entity_id = "select.test_seat_heater_left" with patch( "homeassistant.components.tessie.select.set_seat_heat", return_value=TEST_RESPONSE, @@ -45,14 +49,64 @@ async def test_select( assert mock_set.call_args[1]["level"] == 1 assert hass.states.get(entity_id) == snapshot(name=SERVICE_SELECT_OPTION) + # Test site operation mode + entity_id = "select.energy_site_operation_mode" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.operation", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: entity_id, + ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, + }, + blocking=True, + ) + assert (state := hass.states.get(entity_id)) + assert state.state == EnergyOperationMode.AUTONOMOUS.value + call.assert_called_once() + + # Test site export mode + entity_id = "select.energy_site_allow_export" + with patch( + "homeassistant.components.teslemetry.EnergySpecific.grid_import_export", + return_value=TEST_RESPONSE, + ) as call: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, ATTR_OPTION: EnergyExportMode.BATTERY_OK.value}, + blocking=True, + ) + assert (state := hass.states.get(entity_id)) + assert state.state == EnergyExportMode.BATTERY_OK.value + call.assert_called_once() + + # Test changing select + entity_id = "select.test_seat_cooler_left" + with patch( + "homeassistant.components.tessie.select.set_seat_cool", + return_value=TEST_RESPONSE, + ) as mock_set: + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatCoolerOptions.LOW}, + blocking=True, + ) + mock_set.assert_called_once() + assert mock_set.call_args[1]["seat"] == "front_left" + assert mock_set.call_args[1]["level"] == 1 + async def test_errors(hass: HomeAssistant) -> None: """Tests unknown error is handled.""" await setup_platform(hass, [Platform.SELECT]) - entity_id = "select.test_seat_heater_left" - # Test setting cover open with unknown error + # Test changing vehicle select with unknown error with ( patch( "homeassistant.components.tessie.select.set_seat_heat", @@ -63,8 +117,31 @@ async def test_errors(hass: HomeAssistant) -> None: await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: [entity_id], ATTR_OPTION: TessieSeatHeaterOptions.LOW}, + { + ATTR_ENTITY_ID: ["select.test_seat_heater_left"], + ATTR_OPTION: TessieSeatHeaterOptions.LOW, + }, blocking=True, ) mock_set.assert_called_once() assert error.value.__cause__ == ERROR_UNKNOWN + + # Test changing energy select with unknown 
error + with ( + patch( + "homeassistant.components.tessie.EnergySpecific.operation", + side_effect=UnsupportedVehicle, + ) as mock_set, + pytest.raises(HomeAssistantError) as error, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: ["select.energy_site_operation_mode"], + ATTR_OPTION: EnergyOperationMode.AUTONOMOUS.value, + }, + blocking=True, + ) + mock_set.assert_called_once() + assert isinstance(error.value.__cause__, UnsupportedVehicle) diff --git a/tests/components/tessie/test_switch.py b/tests/components/tessie/test_switch.py index 907be29ddcc..499e529b2e8 100644 --- a/tests/components/tessie/test_switch.py +++ b/tests/components/tessie/test_switch.py @@ -2,6 +2,7 @@ from unittest.mock import patch +import pytest from syrupy import SnapshotAssertion from homeassistant.components.switch import ( @@ -9,11 +10,11 @@ from homeassistant.components.switch import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) -from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .common import assert_entities, setup_platform +from .common import RESPONSE_OK, assert_entities, setup_platform async def test_switches( @@ -52,3 +53,56 @@ async def test_switches( mock_stop_charging.assert_called_once() assert hass.states.get(entity_id) == snapshot(name=SERVICE_TURN_OFF) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("name", "on", "off"), + [ + ( + "energy_site_storm_watch", + "EnergySpecific.storm_mode", + "EnergySpecific.storm_mode", + ), + ( + "energy_site_allow_charging_from_grid", + "EnergySpecific.grid_import_export", + "EnergySpecific.grid_import_export", + ), + ], +) +async def test_switch_services( + hass: HomeAssistant, name: str, on: str, off: str +) -> None: + """Tests that the switch service calls work.""" + + await setup_platform(hass, [Platform.SWITCH]) + + entity_id = f"switch.{name}" + with patch( + f"homeassistant.components.teslemetry.{on}", + return_value=RESPONSE_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_ON + call.assert_called_once() + + with patch( + f"homeassistant.components.teslemetry.{off}", + return_value=RESPONSE_OK, + ) as call: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == STATE_OFF + call.assert_called_once() diff --git a/tests/components/tibber/conftest.py b/tests/components/tibber/conftest.py index fc6596444c5..0b48531bde1 100644 --- a/tests/components/tibber/conftest.py +++ b/tests/components/tibber/conftest.py @@ -27,7 +27,7 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture async def mock_tibber_setup( config_entry: MockConfigEntry, hass: HomeAssistant -) -> AsyncGenerator[None, MagicMock]: +) -> AsyncGenerator[MagicMock]: """Mock tibber entry setup.""" unique_user_id = "unique_user_id" title = "title" diff --git a/tests/components/tibber/test_services.py b/tests/components/tibber/test_services.py index fe437e421d7..e9bee3ba31f 100644 --- a/tests/components/tibber/test_services.py +++ b/tests/components/tibber/test_services.py @@ -4,6 +4,7 @@ import asyncio import datetime 
as dt from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.tibber.const import DOMAIN @@ -11,11 +12,12 @@ from homeassistant.components.tibber.services import PRICE_SERVICE_NAME, __get_p from homeassistant.core import ServiceCall from homeassistant.exceptions import ServiceValidationError +STARTTIME = dt.datetime.fromtimestamp(1615766400) + def generate_mock_home_data(): """Create mock data from the tibber connection.""" - today = remove_microseconds(dt.datetime.now()) - tomorrow = remove_microseconds(today + dt.timedelta(days=1)) + tomorrow = STARTTIME + dt.timedelta(days=1) mock_homes = [ MagicMock( name="first_home", @@ -26,13 +28,13 @@ def generate_mock_home_data(): "priceInfo": { "today": [ { - "startsAt": today.isoformat(), + "startsAt": STARTTIME.isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", }, { "startsAt": ( - today + dt.timedelta(hours=1) + STARTTIME + dt.timedelta(hours=1) ).isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", @@ -67,13 +69,13 @@ def generate_mock_home_data(): "priceInfo": { "today": [ { - "startsAt": today.isoformat(), + "startsAt": STARTTIME.isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", }, { "startsAt": ( - today + dt.timedelta(hours=1) + STARTTIME + dt.timedelta(hours=1) ).isoformat(), "total": 0.46914, "level": "VERY_EXPENSIVE", @@ -119,19 +121,16 @@ def create_mock_hass(): return mock_hass -def remove_microseconds(dt): - """Remove microseconds from a datetime object.""" - return dt.replace(microsecond=0) - - -async def test_get_prices(): +async def test_get_prices( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with mock data.""" - today = remove_microseconds(dt.datetime.now()) - tomorrow = remove_microseconds(dt.datetime.now() + dt.timedelta(days=1)) + freezer.move_to(STARTTIME) + tomorrow = STARTTIME + dt.timedelta(days=1) call = ServiceCall( DOMAIN, PRICE_SERVICE_NAME, - {"start": today.date().isoformat(), "end": tomorrow.date().isoformat()}, + {"start": STARTTIME.date().isoformat(), "end": tomorrow.date().isoformat()}, ) result = await __get_prices(call, hass=create_mock_hass()) @@ -140,24 +139,24 @@ async def test_get_prices(): "prices": { "first_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -166,9 +165,11 @@ async def test_get_prices(): } -async def test_get_prices_no_input(): +async def test_get_prices_no_input( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with no input.""" - today = remove_microseconds(dt.datetime.now()) + freezer.move_to(STARTTIME) call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {}) result = await __get_prices(call, hass=create_mock_hass()) @@ -177,24 +178,24 @@ async def test_get_prices_no_input(): "prices": { "first_home": [ { - "start_time": today, + "start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, ], "second_home": [ { - "start_time": today, + 
"start_time": STARTTIME, "price": 0.46914, "level": "VERY_EXPENSIVE", }, { - "start_time": today + dt.timedelta(hours=1), + "start_time": STARTTIME + dt.timedelta(hours=1), "price": 0.46914, "level": "VERY_EXPENSIVE", }, @@ -203,9 +204,12 @@ async def test_get_prices_no_input(): } -async def test_get_prices_start_tomorrow(): +async def test_get_prices_start_tomorrow( + freezer: FrozenDateTimeFactory, +) -> None: """Test __get_prices with start date tomorrow.""" - tomorrow = remove_microseconds(dt.datetime.now() + dt.timedelta(days=1)) + freezer.move_to(STARTTIME) + tomorrow = STARTTIME + dt.timedelta(days=1) call = ServiceCall( DOMAIN, PRICE_SERVICE_NAME, {"start": tomorrow.date().isoformat()} ) @@ -242,7 +246,7 @@ async def test_get_prices_start_tomorrow(): } -async def test_get_prices_invalid_input(): +async def test_get_prices_invalid_input() -> None: """Test __get_prices with invalid input.""" call = ServiceCall(DOMAIN, PRICE_SERVICE_NAME, {"start": "test"}) diff --git a/tests/components/time_date/conftest.py b/tests/components/time_date/conftest.py index 4bcaa887b6f..7841b6d0b83 100644 --- a/tests/components/time_date/conftest.py +++ b/tests/components/time_date/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Time & Date integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/todo/test_init.py b/tests/components/todo/test_init.py index 5999b4b9fbe..b62505b14b4 100644 --- a/tests/components/todo/test_init.py +++ b/tests/components/todo/test_init.py @@ -1,26 +1,33 @@ """Tests for the todo integration.""" +from collections.abc import Generator import datetime from typing import Any from unittest.mock import AsyncMock import zoneinfo import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import conversation from homeassistant.components.homeassistant.exposed_entities import async_expose_entity from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, DOMAIN, TodoItem, TodoItemStatus, TodoListEntity, TodoListEntityFeature, + TodoServices, intent as todo_intent, ) from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow -from homeassistant.const import Platform +from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import intent @@ -230,11 +237,11 @@ async def test_list_todo_items( [ ({}, [ITEM_1, ITEM_2]), ( - {"status": [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, + {ATTR_STATUS: [TodoItemStatus.COMPLETED, TodoItemStatus.NEEDS_ACTION]}, [ITEM_1, ITEM_2], ), - ({"status": [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), - ({"status": [TodoItemStatus.COMPLETED]}, [ITEM_2]), + ({ATTR_STATUS: [TodoItemStatus.NEEDS_ACTION]}, [ITEM_1]), + ({ATTR_STATUS: [TodoItemStatus.COMPLETED]}, [ITEM_2]), ], ) async def test_get_items_service( @@ -251,13 +258,13 @@ async def test_get_items_service( state = hass.states.get("todo.entity1") assert state assert state.state == "1" - assert state.attributes == {"supported_features": 15} + assert state.attributes == {ATTR_SUPPORTED_FEATURES: 15} result = await hass.services.async_call( DOMAIN, - "get_items", + TodoServices.GET_ITEMS, service_data, - target={"entity_id": 
"todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) @@ -297,9 +304,9 @@ async def test_add_item_service( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -324,9 +331,9 @@ async def test_add_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -335,21 +342,21 @@ async def test_add_item_service_raises( ("item_data", "expected_exception", "expected_error"), [ ({}, vol.Invalid, "required key not provided"), - ({"item": ""}, vol.Invalid, "length of value must be at least 1"), + ({ATTR_ITEM: ""}, vol.Invalid, "length of value must be at least 1"), ( - {"item": "Submit forms", "description": "Submit tax forms"}, + {ATTR_ITEM: "Submit forms", ATTR_DESCRIPTION: "Submit tax forms"}, ServiceValidationError, "does not support setting field: description", ), ( - {"item": "Submit forms", "due_date": "2023-11-17"}, + {ATTR_ITEM: "Submit forms", ATTR_DUE_DATE: "2023-11-17"}, ServiceValidationError, "does not support setting field: due_date", ), ( { - "item": "Submit forms", - "due_datetime": f"2023-11-17T17:00:00{TEST_OFFSET}", + ATTR_ITEM: "Submit forms", + ATTR_DUE_DATETIME: f"2023-11-17T17:00:00{TEST_OFFSET}", }, ServiceValidationError, "does not support setting field: due_datetime", @@ -370,9 +377,9 @@ async def test_add_item_service_invalid_input( with pytest.raises(expected_exception) as exc: await hass.services.async_call( DOMAIN, - "add_item", + TodoServices.ADD_ITEM, item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -384,7 +391,7 @@ async def test_add_item_service_invalid_input( [ ( TodoListEntityFeature.SET_DUE_DATE_ON_ITEM, - {"item": "New item", "due_date": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATE: "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -393,7 +400,10 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": f"2023-11-13T17:00:00{TEST_OFFSET}"}, + { + ATTR_ITEM: "New item", + ATTR_DUE_DATETIME: f"2023-11-13T17:00:00{TEST_OFFSET}", + }, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -402,7 +412,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13T17:00:00+00:00"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13T17:00:00+00:00"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -411,7 +421,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DUE_DATETIME_ON_ITEM, - {"item": "New item", "due_datetime": "2023-11-13"}, + {ATTR_ITEM: "New item", ATTR_DUE_DATETIME: "2023-11-13"}, TodoItem( summary="New item", status=TodoItemStatus.NEEDS_ACTION, @@ -420,7 +430,7 @@ async def test_add_item_service_invalid_input( ), ( TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM, - {"item": "New item", "description": "Submit revised draft"}, + {ATTR_ITEM: "New item", ATTR_DESCRIPTION: "Submit revised draft"}, TodoItem( summary="New item", 
status=TodoItemStatus.NEEDS_ACTION, @@ -443,9 +453,9 @@ async def test_add_item_service_extended_fields( await hass.services.async_call( DOMAIN, - "add_item", - {"item": "New item", **item_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "New item", **item_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -465,9 +475,9 @@ async def test_update_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_RENAME: "Updated item", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -490,9 +500,9 @@ async def test_update_todo_item_service_by_id_status_only( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -515,9 +525,9 @@ async def test_update_todo_item_service_by_id_rename( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -540,9 +550,9 @@ async def test_update_todo_item_service_raises( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -550,9 +560,9 @@ async def test_update_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", "rename": "Updated item", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", "rename": "Updated item", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -567,9 +577,9 @@ async def test_update_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -592,9 +602,9 @@ async def test_update_todo_item_service_by_summary_only_status( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #1", "rename": "Something else"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #1", "rename": "Something else"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -618,9 +628,9 @@ async def test_update_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "Item #7", "status": "completed"}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Item #7", "status": "completed"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -652,7 +662,7 @@ async def 
test_update_item_service_invalid_input( DOMAIN, "update_item", item_data, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -677,9 +687,9 @@ async def test_update_todo_item_field_unsupported( with pytest.raises(ServiceValidationError, match="does not support"): await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -733,9 +743,9 @@ async def test_update_todo_item_extended_fields( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -823,9 +833,9 @@ async def test_update_todo_item_extended_fields_overwrite_existing_values( await hass.services.async_call( DOMAIN, - "update_item", - {"item": "1", **update_data}, - target={"entity_id": "todo.entity1"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "1", **update_data}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -845,9 +855,9 @@ async def test_remove_todo_item_service_by_id( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -868,9 +878,9 @@ async def test_remove_todo_item_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["1", "2"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["1", "2"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -888,9 +898,9 @@ async def test_remove_todo_item_service_invalid_input( ): await hass.services.async_call( DOMAIN, - "remove_item", + TodoServices.REMOVE_ITEM, {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -905,9 +915,9 @@ async def test_remove_todo_item_service_by_summary( await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #1"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #1"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -927,9 +937,9 @@ async def test_remove_todo_item_service_by_summary_not_found( with pytest.raises(ServiceValidationError, match="Unable to find"): await hass.services.async_call( DOMAIN, - "remove_item", - {"item": ["Item #7"]}, - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["Item #7"]}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1035,26 +1045,26 @@ async def test_move_todo_item_service_invalid_input( ("service_name", "payload"), [ ( - "add_item", + TodoServices.ADD_ITEM, { - "item": "New item", + ATTR_ITEM: "New item", }, ), ( - "remove_item", + TodoServices.REMOVE_ITEM, { - "item": ["1"], + ATTR_ITEM: ["1"], }, ), ( - "update_item", + TodoServices.UPDATE_ITEM, { - "item": "1", - "rename": "Updated item", + ATTR_ITEM: "1", + ATTR_RENAME: "Updated item", }, ), ( - "remove_completed_items", + TodoServices.REMOVE_COMPLETED_ITEMS, None, ), ], @@ -1078,7 +1088,7 @@ async def test_unsupported_service( DOMAIN, service_name, payload, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: 
"todo.entity1"}, blocking=True, ) @@ -1131,7 +1141,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "beer"}, "name": {"value": "list 1"}}, + {ATTR_ITEM: {"value": "beer"}, "name": {"value": "list 1"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1147,7 +1157,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "cheese"}, "name": {"value": "List 2"}}, + {ATTR_ITEM: {"value": "cheese"}, "name": {"value": "List 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1162,7 +1172,7 @@ async def test_add_item_intent( hass, "test", todo_intent.INTENT_LIST_ADD_ITEM, - {"item": {"value": "wine"}, "name": {"value": "lIST 2"}}, + {ATTR_ITEM: {"value": "wine"}, "name": {"value": "lIST 2"}}, assistant=conversation.DOMAIN, ) assert response.response_type == intent.IntentResponseType.ACTION_DONE @@ -1224,8 +1234,8 @@ async def test_remove_completed_items_service( await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1238,8 +1248,8 @@ async def test_remove_completed_items_service( # calling service multiple times will not call the entity method await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) test_entity.async_delete_todo_items.assert_not_called() @@ -1257,8 +1267,8 @@ async def test_remove_completed_items_service_raises( with pytest.raises(HomeAssistantError, match="Ooops"): await hass.services.async_call( DOMAIN, - "remove_completed_items", - target={"entity_id": "todo.entity1"}, + TodoServices.REMOVE_COMPLETED_ITEMS, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, ) @@ -1423,7 +1433,7 @@ async def test_list_todo_items_extended_fields( DOMAIN, "get_items", {}, - target={"entity_id": "todo.entity1"}, + target={ATTR_ENTITY_ID: "todo.entity1"}, blocking=True, return_response=True, ) diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 386385a0ddb..45fda53ccc1 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for the todoist tests.""" +from collections.abc import Generator from http import HTTPStatus from unittest.mock import AsyncMock, patch @@ -7,7 +8,6 @@ import pytest from requests.exceptions import HTTPError from requests.models import Response from todoist_api_python.models import Collaborator, Due, Label, Project, Task -from typing_extensions import Generator from homeassistant.components.todoist import DOMAIN from homeassistant.const import CONF_TOKEN, Platform diff --git a/tests/components/todoist/test_todo.py b/tests/components/todoist/test_todo.py index 2aabfcc5755..1c2da67fb02 100644 --- a/tests/components/todoist/test_todo.py +++ b/tests/components/todoist/test_todo.py @@ -6,8 +6,17 @@ from unittest.mock import AsyncMock import pytest from todoist_api_python.models import Due, Task -from homeassistant.components.todo import DOMAIN as TODO_DOMAIN -from homeassistant.const import Platform +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_DUE_DATE, + ATTR_DUE_DATETIME, + ATTR_ITEM, + ATTR_RENAME, + ATTR_STATUS, 
+ DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_component import async_update_entity @@ -86,7 +95,7 @@ async def test_todo_item_state( ), ( [], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -105,7 +114,7 @@ async def test_todo_item_state( ), ( [], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -132,7 +141,7 @@ async def test_todo_item_state( ), ( [], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ make_api_task( id="task-id-1", @@ -173,9 +182,9 @@ async def test_add_todo_list_item( await hass.services.async_call( TODO_DOMAIN, - "add_item", - {"item": "Soda", **item_data}, - target={"entity_id": "todo.name"}, + TodoServices.ADD_ITEM, + {ATTR_ITEM: "Soda", **item_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) @@ -190,9 +199,9 @@ async def test_add_todo_list_item( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -223,9 +232,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "completed"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "completed"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.close_task.called @@ -246,9 +255,9 @@ async def test_update_todo_item_status( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", "status": "needs_action"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "task-id-1", ATTR_STATUS: "needs_action"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.reopen_task.called @@ -274,7 +283,7 @@ async def test_update_todo_item_status( description="desc", ) ], - {"rename": "Milk"}, + {ATTR_RENAME: "Milk"}, [ make_api_task( id="task-id-1", @@ -298,7 +307,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_date": "2023-11-18"}, + {ATTR_DUE_DATE: "2023-11-18"}, [ make_api_task( id="task-id-1", @@ -322,7 +331,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"due_datetime": "2023-11-18T06:30:00"}, + {ATTR_DUE_DATETIME: "2023-11-18T06:30:00"}, [ make_api_task( id="task-id-1", @@ -351,7 +360,7 @@ async def test_update_todo_item_status( ), ( [make_api_task(id="task-id-1", content="Soda", is_completed=False)], - {"description": "6-pack"}, + {ATTR_DESCRIPTION: "6-pack"}, [ make_api_task( id="task-id-1", @@ -382,7 +391,7 @@ async def test_update_todo_item_status( is_completed=False, ) ], - {"description": None}, + {ATTR_DESCRIPTION: None}, [ make_api_task( id="task-id-1", @@ -415,7 +424,7 @@ async def test_update_todo_item_status( due=Due(date="2024-01-01", is_recurring=True, string="every day"), ) ], - {"due_date": "2024-02-01"}, + {ATTR_DUE_DATE: "2024-02-01"}, [ make_api_task( id="task-id-1", @@ -472,9 +481,9 @@ async def test_update_todo_items( await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "task-id-1", **update_data}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: 
"task-id-1", **update_data}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.update_task.called @@ -484,9 +493,9 @@ async def test_update_todo_items( result = await hass.services.async_call( TODO_DOMAIN, - "get_items", + TodoServices.GET_ITEMS, {}, - target={"entity_id": "todo.name"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, return_response=True, ) @@ -519,9 +528,9 @@ async def test_remove_todo_item( await hass.services.async_call( TODO_DOMAIN, - "remove_item", - {"item": ["task-id-1", "task-id-2"]}, - target={"entity_id": "todo.name"}, + TodoServices.REMOVE_ITEM, + {ATTR_ITEM: ["task-id-1", "task-id-2"]}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) assert api.delete_task.call_count == 2 @@ -575,9 +584,9 @@ async def test_subscribe( ] await hass.services.async_call( TODO_DOMAIN, - "update_item", - {"item": "Cheese", "rename": "Wine"}, - target={"entity_id": "todo.name"}, + TodoServices.UPDATE_ITEM, + {ATTR_ITEM: "Cheese", ATTR_RENAME: "Wine"}, + target={ATTR_ENTITY_ID: "todo.name"}, blocking=True, ) diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index 54089c6f192..81cfecbc530 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -847,6 +847,101 @@ 'state': 'off', }) # --- +# name: test_entity_registry[binary_sensor.test_carbon_monoxide-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_carbon_monoxide', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Carbon monoxide', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_carbon_monoxide', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_carbon_monoxide-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'carbon_monoxide', + 'friendly_name': 'test Carbon monoxide', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_carbon_monoxide', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_registry[binary_sensor.test_police_emergency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_police_emergency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Police emergency', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'police', + 'unique_id': '123456_police', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_police_emergency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'friendly_name': 'test Police emergency', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_police_emergency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_entity_registry[binary_sensor.test_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -895,6 +990,54 @@ 'state': 'off', }) # --- +# name: test_entity_registry[binary_sensor.test_smoke-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.test_smoke', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Smoke', + 'platform': 'totalconnect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '123456_smoke', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_registry[binary_sensor.test_smoke-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'smoke', + 'friendly_name': 'test Smoke', + 'location_id': '123456', + }), + 'context': , + 'entity_id': 'binary_sensor.test_smoke', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- # name: test_entity_registry[binary_sensor.test_tamper-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index b1256f437e7..ee4530575ce 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -1,10 +1,10 @@ """tplink conftest.""" +from collections.abc import Generator import copy from unittest.mock import DEFAULT, AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tplink import DOMAIN from homeassistant.core import HomeAssistant @@ -22,7 +22,7 @@ from . 
import ( _mocked_device, ) -from tests.common import MockConfigEntry, mock_device_registry, mock_registry +from tests.common import MockConfigEntry @pytest.fixture @@ -77,18 +77,6 @@ def mock_connect(): yield {"connect": mock_connect, "mock_devices": devices} -@pytest.fixture(name="device_reg") -def device_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_device_registry(hass) - - -@pytest.fixture(name="entity_reg") -def entity_reg_fixture(hass): - """Return an empty, loaded, registry.""" - return mock_registry(hass) - - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" diff --git a/tests/components/tplink/snapshots/test_binary_sensor.ambr b/tests/components/tplink/snapshots/test_binary_sensor.ambr index b45494d1001..cded74da363 100644 --- a/tests/components/tplink/snapshots/test_binary_sensor.ambr +++ b/tests/components/tplink/snapshots/test_binary_sensor.ambr @@ -359,6 +359,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_button.ambr b/tests/components/tplink/snapshots/test_button.ambr index 0167256877d..d6019861804 100644 --- a/tests/components/tplink/snapshots/test_button.ambr +++ b/tests/components/tplink/snapshots/test_button.ambr @@ -117,6 +117,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_climate.ambr b/tests/components/tplink/snapshots/test_climate.ambr index 4bdfe52b9b1..ad863fc79ae 100644 --- a/tests/components/tplink/snapshots/test_climate.ambr +++ b/tests/components/tplink/snapshots/test_climate.ambr @@ -84,6 +84,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'thermostat', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_fan.ambr b/tests/components/tplink/snapshots/test_fan.ambr index 0a51909affe..1a7392dc63a 100644 --- a/tests/components/tplink/snapshots/test_fan.ambr +++ b/tests/components/tplink/snapshots/test_fan.ambr @@ -28,7 +28,7 @@ 'original_name': None, 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH', 'unit_of_measurement': None, @@ -42,7 +42,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device', @@ -81,7 +81,7 @@ 'original_name': 'my_fan_0', 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH00', 'unit_of_measurement': None, @@ -95,7 +95,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device_my_fan_0', @@ -134,7 +134,7 @@ 'original_name': 'my_fan_1', 'platform': 'tplink', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '123456789ABCDEFGH01', 'unit_of_measurement': None, @@ -148,7 +148,7 @@ 'percentage_step': 25.0, 'preset_mode': None, 'preset_modes': None, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.my_device_my_fan_1', @@ -184,6 +184,7 @@ }), 'manufacturer': 
'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_number.ambr b/tests/components/tplink/snapshots/test_number.ambr index 8cda0a728b3..ee06314ffe3 100644 --- a/tests/components/tplink/snapshots/test_number.ambr +++ b/tests/components/tplink/snapshots/test_number.ambr @@ -25,6 +25,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_select.ambr b/tests/components/tplink/snapshots/test_select.ambr index 555b0eb74d1..c851979f34c 100644 --- a/tests/components/tplink/snapshots/test_select.ambr +++ b/tests/components/tplink/snapshots/test_select.ambr @@ -25,6 +25,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_sensor.ambr b/tests/components/tplink/snapshots/test_sensor.ambr index 9ea22af45fd..e639540e552 100644 --- a/tests/components/tplink/snapshots/test_sensor.ambr +++ b/tests/components/tplink/snapshots/test_sensor.ambr @@ -25,6 +25,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/snapshots/test_switch.ambr b/tests/components/tplink/snapshots/test_switch.ambr index 65eead6ddf4..4354ea1905a 100644 --- a/tests/components/tplink/snapshots/test_switch.ambr +++ b/tests/components/tplink/snapshots/test_switch.ambr @@ -25,6 +25,7 @@ }), 'manufacturer': 'TP-Link', 'model': 'HS100', + 'model_id': None, 'name': 'my_device', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index a80a74a5697..2f24fa829f9 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -120,12 +120,13 @@ async def test_set_temperature( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device ) -> None: """Test that set_temperature service calls the setter.""" + mocked_thermostat = mocked_hub.children[0] + mocked_thermostat.features["target_temperature"].minimum_value = 0 + await setup_platform_for_device( hass, mock_config_entry, Platform.CLIMATE, mocked_hub ) - mocked_thermostat = mocked_hub.children[0] - await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index 5b3cf648b6e..986aaebd170 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -107,7 +107,7 @@ async def test_config_entry_retry(hass: HomeAssistant) -> None: async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( - hass: HomeAssistant, entity_reg: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: """Test no migration happens if the original entity id still exists.""" config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=MAC_ADDRESS) @@ -115,14 +115,14 @@ async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( dimmer = _mocked_device(alias="My dimmer", modules=[Module.Light]) rollout_unique_id = MAC_ADDRESS.replace(":", "").upper() original_unique_id = tplink.legacy_device_id(dimmer) - original_dimmer_entity_reg = entity_reg.async_get_or_create( + 
original_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", unique_id=original_unique_id, original_name="Original dimmer", ) - rollout_dimmer_entity_reg = entity_reg.async_get_or_create( + rollout_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", @@ -138,7 +138,7 @@ async def test_dimmer_switch_unique_id_fix_original_entity_still_exists( await setup.async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done(wait_background_tasks=True) - migrated_dimmer_entity_reg = entity_reg.async_get_or_create( + migrated_dimmer_entity_reg = entity_registry.async_get_or_create( config_entry=config_entry, platform=DOMAIN, domain="light", diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 590274b8405..6998d8fbcc7 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -505,7 +505,9 @@ async def test_dimmer_turn_on_fix(hass: HomeAssistant) -> None: light.set_state.reset_mock() -async def test_smart_strip_effects(hass: HomeAssistant) -> None: +async def test_smart_strip_effects( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test smart strip effects.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS @@ -555,6 +557,40 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: "Effect2", brightness=None, transition=None ) light_effect.set_effect.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "Effect2" + + # Test setting light effect off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "off"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + light.set_state.assert_not_called() + + # Test setting light effect to invalid value + caplog.clear() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "Effect3"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + assert "Invalid effect Effect3 for" in caplog.text light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) diff --git a/tests/components/tplink_omada/conftest.py b/tests/components/tplink_omada/conftest.py index c29fcb633e4..510a2e7a87c 100644 --- a/tests/components/tplink_omada/conftest.py +++ b/tests/components/tplink_omada/conftest.py @@ -1,6 +1,6 @@ """Test fixtures for TP-Link Omada integration.""" -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator import json from unittest.mock import AsyncMock, MagicMock, patch @@ -17,7 +17,6 @@ from tplink_omada_client.devices import ( OmadaSwitch, OmadaSwitchPortDetails, ) -from typing_extensions import Generator from homeassistant.components.tplink_omada.config_flow import CONF_SITE 
from homeassistant.components.tplink_omada.const import DOMAIN @@ -130,6 +129,7 @@ def _get_mock_client(mac: str) -> OmadaNetworkClient: if c["wireless"]: return OmadaWirelessClient(c) return OmadaWiredClient(c) + raise ValueError(f"Client with MAC {mac} not found in mock data") @pytest.fixture diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index feacbb7b13f..b25ab6a0a34 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -45,7 +45,7 @@ async def traccar_client( @pytest.fixture(autouse=True) -async def setup_zones(hass): +async def setup_zones(hass: HomeAssistant) -> None: """Set up Zone config in HA.""" assert await async_setup_component( hass, diff --git a/tests/components/traccar_server/conftest.py b/tests/components/traccar_server/conftest.py index 6a8e428e7a2..0013b3249bd 100644 --- a/tests/components/traccar_server/conftest.py +++ b/tests/components/traccar_server/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Traccar Server tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pytraccar import ApiClient, SubscriptionStatus -from typing_extensions import Generator from homeassistant.components.traccar_server.const import ( CONF_CUSTOM_ATTRIBUTES, diff --git a/tests/components/traccar_server/test_config_flow.py b/tests/components/traccar_server/test_config_flow.py index 5da6f592957..62f39f00dc1 100644 --- a/tests/components/traccar_server/test_config_flow.py +++ b/tests/components/traccar_server/test_config_flow.py @@ -1,11 +1,11 @@ """Test the Traccar Server config flow.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock import pytest from pytraccar import TraccarException -from typing_extensions import Generator from homeassistant import config_entries from homeassistant.components.traccar.device_tracker import PLATFORM_SCHEMA diff --git a/tests/components/traccar_server/test_diagnostics.py b/tests/components/traccar_server/test_diagnostics.py index 15d74ef9ef5..738fea1a45d 100644 --- a/tests/components/traccar_server/test_diagnostics.py +++ b/tests/components/traccar_server/test_diagnostics.py @@ -1,9 +1,9 @@ """Test Traccar Server diagnostics.""" +from collections.abc import Generator from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from typing_extensions import Generator from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index 92ba2c67020..b0b982d4825 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -9,11 +9,11 @@ from unittest.mock import patch import pytest from pytest_unordered import unordered -from homeassistant.bootstrap import async_setup_component from homeassistant.components.trace.const import DEFAULT_STORED_TRACES from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import Context, CoreState, HomeAssistant, callback from homeassistant.helpers.typing import UNDEFINED +from homeassistant.setup import async_setup_component from homeassistant.util.uuid import random_uuid_hex from tests.common import load_fixture @@ -207,7 +207,7 @@ async def test_get_trace( _assert_raw_config(domain, sun_config, trace) assert trace["blueprint_inputs"] is None assert trace["context"] - assert 
trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" @@ -899,7 +899,7 @@ async def test_list_traces( assert len(_find_traces(response["result"], domain, "sun")) == 1 trace = _find_traces(response["result"], domain, "sun")[0] assert trace["last_step"] == last_step[0].format(prefix=prefix) - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == script_execution[0] assert trace["timestamp"] @@ -1639,7 +1639,7 @@ async def test_trace_blueprint_automation( assert trace["config"]["id"] == "sun" assert trace["blueprint_inputs"] == sun_config assert trace["context"] - assert trace["error"] == "Service test.automation not found" + assert trace["error"] == "Action test.automation not found" assert trace["state"] == "stopped" assert trace["script_execution"] == "error" assert trace["item_id"] == "sun" diff --git a/tests/components/tractive/conftest.py b/tests/components/tractive/conftest.py index 9a17a557c49..7f319a87b5b 100644 --- a/tests/components/tractive/conftest.py +++ b/tests/components/tractive/conftest.py @@ -1,12 +1,12 @@ """Common fixtures for the Tractive tests.""" +from collections.abc import Generator from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiotractive.trackable_object import TrackableObject from aiotractive.tracker import Tracker import pytest -from typing_extensions import Generator from homeassistant.components.tractive.const import DOMAIN, SERVER_UNAVAILABLE from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/tractive/test_diagnostics.py b/tests/components/tractive/test_diagnostics.py index cc4fcdeba15..ce07b4d6e2a 100644 --- a/tests/components/tractive/test_diagnostics.py +++ b/tests/components/tractive/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -27,4 +28,4 @@ async def test_entry_diagnostics( hass, hass_client, mock_config_entry ) - assert result == snapshot + assert result == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/tradfri/conftest.py b/tests/components/tradfri/conftest.py index 08afe77b4a3..4b0b742850b 100644 --- a/tests/components/tradfri/conftest.py +++ b/tests/components/tradfri/conftest.py @@ -2,7 +2,7 @@ from __future__ import annotations -from collections.abc import Callable +from collections.abc import Callable, Generator import json from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -12,7 +12,6 @@ from pytradfri.command import Command from pytradfri.const import ATTR_FIRMWARE_VERSION, ATTR_GATEWAY_ID from pytradfri.device import Device from pytradfri.gateway import Gateway -from typing_extensions import Generator from homeassistant.components.tradfri.const import DOMAIN diff --git a/tests/components/tradfri/test_fan.py b/tests/components/tradfri/test_fan.py index 2abe03d629a..4f72e4709e9 100644 --- a/tests/components/tradfri/test_fan.py +++ b/tests/components/tradfri/test_fan.py @@ -52,7 +52,7 @@ async def test_fan_available( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert 
state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 await command_store.trigger_observe_callback( hass, device, {ATTR_REACHABLE_STATE: 0} @@ -172,7 +172,7 @@ async def test_services( assert state.attributes[ATTR_PERCENTAGE_STEP] == pytest.approx(2.040816) assert state.attributes[ATTR_PRESET_MODES] == ["Auto"] assert state.attributes[ATTR_PRESET_MODE] is None - assert state.attributes[ATTR_SUPPORTED_FEATURES] == 9 + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 57 await hass.services.async_call( FAN_DOMAIN, diff --git a/tests/components/trafikverket_camera/conftest.py b/tests/components/trafikverket_camera/conftest.py index 61eebb623b2..cef85af2228 100644 --- a/tests/components/trafikverket_camera/conftest.py +++ b/tests/components/trafikverket_camera/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -21,7 +21,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, get_camera: CameraInfo + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + get_camera: CameraInfoModel, ) -> MockConfigEntry: """Set up the Trafikverket Camera integration in Home Assistant.""" aioclient_mock.get( @@ -51,10 +53,10 @@ async def load_integration_from_entry( @pytest.fixture(name="get_camera") -def fixture_get_camera() -> CameraInfo: +def fixture_get_camera() -> CameraInfoModel: """Construct Camera Mock.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, @@ -72,10 +74,10 @@ def fixture_get_camera() -> CameraInfo: @pytest.fixture(name="get_camera2") -def fixture_get_camera2() -> CameraInfo: +def fixture_get_camera2() -> CameraInfoModel: """Construct Camera Mock 2.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera2", camera_id="5678", active=True, @@ -93,11 +95,11 @@ def fixture_get_camera2() -> CameraInfo: @pytest.fixture(name="get_cameras") -def fixture_get_cameras() -> CameraInfo: +def fixture_get_cameras() -> CameraInfoModel: """Construct Camera Mock with multiple cameras.""" return [ - CameraInfo( + CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, @@ -112,7 +114,7 @@ def fixture_get_cameras() -> CameraInfo: status="Running", camera_type="Road", ), - CameraInfo( + CameraInfoModel( camera_name="Test Camera2", camera_id="5678", active=True, @@ -131,10 +133,10 @@ def fixture_get_cameras() -> CameraInfo: @pytest.fixture(name="get_camera_no_location") -def fixture_get_camera_no_location() -> CameraInfo: +def fixture_get_camera_no_location() -> CameraInfoModel: """Construct Camera Mock.""" - return CameraInfo( + return CameraInfoModel( camera_name="Test Camera", camera_id="1234", active=True, diff --git a/tests/components/trafikverket_camera/test_binary_sensor.py b/tests/components/trafikverket_camera/test_binary_sensor.py index 6c694f76233..6750c05772b 100644 --- a/tests/components/trafikverket_camera/test_binary_sensor.py +++ b/tests/components/trafikverket_camera/test_binary_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from 
pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_ON @@ -14,7 +14,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera binary sensor.""" diff --git a/tests/components/trafikverket_camera/test_camera.py b/tests/components/trafikverket_camera/test_camera.py index 1bf742b5f08..51d4563c19b 100644 --- a/tests/components/trafikverket_camera/test_camera.py +++ b/tests/components/trafikverket_camera/test_camera.py @@ -7,7 +7,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.camera import async_get_image from homeassistant.config_entries import ConfigEntry @@ -24,7 +24,7 @@ async def test_camera( freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera sensor.""" state1 = hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 8162db076fa..2e9e34f4c35 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -6,7 +6,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoCameraFound, UnknownError -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant import config_entries from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -17,7 +17,7 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: +async def test_form(hass: HomeAssistant, get_camera: CameraInfoModel) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -56,7 +56,9 @@ async def test_form(hass: HomeAssistant, get_camera: CameraInfo) -> None: async def test_form_multiple_cameras( - hass: HomeAssistant, get_cameras: list[CameraInfo], get_camera2: CameraInfo + hass: HomeAssistant, + get_cameras: list[CameraInfoModel], + get_camera2: CameraInfoModel, ) -> None: """Test we get the form with multiple cameras.""" @@ -108,7 +110,7 @@ async def test_form_multiple_cameras( async def test_form_no_location_data( - hass: HomeAssistant, get_camera_no_location: CameraInfo + hass: HomeAssistant, get_camera_no_location: CameraInfoModel ) -> None: """Test we get the form.""" diff --git a/tests/components/trafikverket_camera/test_coordinator.py b/tests/components/trafikverket_camera/test_coordinator.py index 3f37ad05575..f50ab56724e 100644 --- a/tests/components/trafikverket_camera/test_coordinator.py +++ b/tests/components/trafikverket_camera/test_coordinator.py @@ -11,9 +11,9 @@ from pytrafikverket.exceptions import ( NoCameraFound, UnknownError, ) +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera.const import DOMAIN -from homeassistant.components.trafikverket_camera.coordinator 
import CameraData from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -28,7 +28,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_coordinator( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( @@ -86,7 +86,7 @@ async def test_coordinator( async def test_coordinator_failed_update( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, sideeffect: str, p_error: Exception, entry_state: str, @@ -123,7 +123,7 @@ async def test_coordinator_failed_update( async def test_coordinator_failed_get_image( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - get_camera: CameraData, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera coordinator.""" aioclient_mock.get( diff --git a/tests/components/trafikverket_camera/test_init.py b/tests/components/trafikverket_camera/test_init.py index f21d36fda27..aaa4c3cfed7 100644 --- a/tests/components/trafikverket_camera/test_init.py +++ b/tests/components/trafikverket_camera/test_init.py @@ -7,7 +7,7 @@ from unittest.mock import patch import pytest from pytrafikverket.exceptions import UnknownError -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.components.trafikverket_camera import async_migrate_entry from homeassistant.components.trafikverket_camera.const import DOMAIN @@ -23,7 +23,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def test_setup_entry( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test setup entry.""" @@ -55,7 +55,7 @@ async def test_setup_entry( async def test_unload_entry( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test unload an entry.""" @@ -89,7 +89,7 @@ async def test_unload_entry( async def test_migrate_entry( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry to version 2.""" @@ -136,7 +136,7 @@ async def test_migrate_entry( ) async def test_migrate_entry_fails_with_error( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, version: int, unique_id: str, @@ -205,7 +205,7 @@ async def test_migrate_entry_fails_no_id( ) entry.add_to_hass(hass) - _camera = CameraInfo( + _camera = CameraInfoModel( camera_name="Test_camera", camera_id=None, active=True, @@ -236,7 +236,7 @@ async def test_migrate_entry_fails_no_id( async def test_no_migration_needed( hass: HomeAssistant, - get_camera: CameraInfo, + get_camera: CameraInfoModel, aioclient_mock: AiohttpClientMocker, ) -> None: """Test migrate entry fails, camera returns no id.""" diff --git a/tests/components/trafikverket_camera/test_recorder.py b/tests/components/trafikverket_camera/test_recorder.py index 23ebd3f2189..d9778ab851a 100644 --- a/tests/components/trafikverket_camera/test_recorder.py +++ b/tests/components/trafikverket_camera/test_recorder.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from 
pytrafikverket.models import CameraInfoModel from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states @@ -22,7 +22,7 @@ async def test_exclude_attributes( load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, aioclient_mock: AiohttpClientMocker, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test camera has description and location excluded from recording.""" state1 = hass.states.get("camera.test_camera") diff --git a/tests/components/trafikverket_camera/test_sensor.py b/tests/components/trafikverket_camera/test_sensor.py index 18ccbe56070..0f4ef02a850 100644 --- a/tests/components/trafikverket_camera/test_sensor.py +++ b/tests/components/trafikverket_camera/test_sensor.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from pytrafikverket.trafikverket_camera import CameraInfo +from pytrafikverket.models import CameraInfoModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, - get_camera: CameraInfo, + get_camera: CameraInfoModel, ) -> None: """Test the Trafikverket Camera sensor.""" diff --git a/tests/components/trafikverket_ferry/conftest.py b/tests/components/trafikverket_ferry/conftest.py index 3491b8474af..99f3ad10636 100644 --- a/tests/components/trafikverket_ferry/conftest.py +++ b/tests/components/trafikverket_ferry/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -20,7 +20,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( - hass: HomeAssistant, get_ferries: list[FerryStop] + hass: HomeAssistant, get_ferries: list[FerryStopModel] ) -> MockConfigEntry: """Set up the Trafikverket Ferry integration in Home Assistant.""" config_entry = MockConfigEntry( @@ -44,40 +44,51 @@ async def load_integration_from_entry( @pytest.fixture(name="get_ferries") -def fixture_get_ferries() -> list[FerryStop]: +def fixture_get_ferries() -> list[FerryStopModel]: """Construct FerryStop Mock.""" - depart1 = FerryStop( - "13", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - "Harbor 2", + depart1 = FerryStopModel( + ferry_stop_id="13", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime( + dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC + ), + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) - depart2 = FerryStop( - "14", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart2 = FerryStopModel( + ferry_stop_id="14", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - 
"Harbor 2", + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) - depart3 = FerryStop( - "15", - False, - datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + depart3 = FerryStopModel( + ferry_stop_id="15", + ferry_stop_name="Harbor1lane", + short_name="Harle", + deleted=False, + departure_time=datetime(dt_util.now().year + 1, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), - [""], - "0", - datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), - "Harbor 1", - "Harbor 2", + other_information=[""], + deviation_id="0", + modified_time=datetime(dt_util.now().year, 5, 1, 12, 0, tzinfo=dt_util.UTC), + from_harbor_name="Harbor 1", + to_harbor_name="Harbor 2", + type_name="Turnaround", ) return [depart1, depart2, depart3] diff --git a/tests/components/trafikverket_ferry/test_coordinator.py b/tests/components/trafikverket_ferry/test_coordinator.py index ef6329bfd82..ae9a8fc3626 100644 --- a/tests/components/trafikverket_ferry/test_coordinator.py +++ b/tests/components/trafikverket_ferry/test_coordinator.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoFerryFound -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.components.trafikverket_ferry.coordinator import next_departuredate @@ -27,7 +27,7 @@ async def test_coordinator( hass: HomeAssistant, freezer: FrozenDateTimeFactory, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStop], + get_ferries: list[FerryStopModel], ) -> None: """Test the Trafikverket Ferry coordinator.""" entry = MockConfigEntry( diff --git a/tests/components/trafikverket_ferry/test_init.py b/tests/components/trafikverket_ferry/test_init.py index 22ada7e0f40..827711363ff 100644 --- a/tests/components/trafikverket_ferry/test_init.py +++ b/tests/components/trafikverket_ferry/test_init.py @@ -4,7 +4,7 @@ from __future__ import annotations from unittest.mock import patch -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.components.trafikverket_ferry.const import DOMAIN from homeassistant.config_entries import SOURCE_USER, ConfigEntryState @@ -15,7 +15,9 @@ from . 
import ENTRY_CONFIG from tests.common import MockConfigEntry -async def test_setup_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: +async def test_setup_entry( + hass: HomeAssistant, get_ferries: list[FerryStopModel] +) -> None: """Test setup entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -37,7 +39,9 @@ async def test_setup_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> assert len(mock_tvt_ferry.mock_calls) == 1 -async def test_unload_entry(hass: HomeAssistant, get_ferries: list[FerryStop]) -> None: +async def test_unload_entry( + hass: HomeAssistant, get_ferries: list[FerryStopModel] +) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/trafikverket_ferry/test_sensor.py b/tests/components/trafikverket_ferry/test_sensor.py index fc8fa557714..bc5510b0b1d 100644 --- a/tests/components/trafikverket_ferry/test_sensor.py +++ b/tests/components/trafikverket_ferry/test_sensor.py @@ -6,7 +6,7 @@ from datetime import timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_ferry import FerryStop +from pytrafikverket.models import FerryStopModel from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -19,7 +19,7 @@ async def test_sensor( hass: HomeAssistant, load_int: ConfigEntry, monkeypatch: pytest.MonkeyPatch, - get_ferries: list[FerryStop], + get_ferries: list[FerryStopModel], ) -> None: """Test the Trafikverket Ferry sensor.""" state1 = hass.states.get("sensor.harbor1_departure_from") diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 7221d96bae2..4915635e316 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -6,7 +6,7 @@ from datetime import datetime, timedelta from unittest.mock import patch import pytest -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from homeassistant.components.trafikverket_train.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -21,8 +21,8 @@ from tests.common import MockConfigEntry @pytest.fixture(name="load_int") async def load_integration_from_entry( hass: HomeAssistant, - get_trains: list[TrainStop], - get_train_stop: TrainStop, + get_trains: list[TrainStopModel], + get_train_stop: TrainStopModel, ) -> MockConfigEntry: """Set up the Trafikverket Train integration in Home Assistant.""" @@ -69,11 +69,11 @@ async def load_integration_from_entry( @pytest.fixture(name="get_trains") -def fixture_get_trains() -> list[TrainStop]: +def fixture_get_trains() -> list[TrainStopModel]: """Construct TrainStop Mock.""" - depart1 = TrainStop( - id=13, + depart1 = TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), @@ -83,8 +83,8 @@ def fixture_get_trains() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStop( - id=14, + depart2 = TrainStopModel( + train_stop_id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -95,8 +95,8 @@ def fixture_get_trains() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = 
TrainStop( - id=15, + depart3 = TrainStopModel( + train_stop_id=15, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -112,11 +112,11 @@ def fixture_get_trains() -> list[TrainStop]: @pytest.fixture(name="get_trains_next") -def fixture_get_trains_next() -> list[TrainStop]: +def fixture_get_trains_next() -> list[TrainStopModel]: """Construct TrainStop Mock.""" - depart1 = TrainStop( - id=13, + depart1 = TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), estimated_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC), @@ -126,8 +126,8 @@ def fixture_get_trains_next() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart2 = TrainStop( - id=14, + depart2 = TrainStopModel( + train_stop_id=14, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=15), @@ -138,8 +138,8 @@ def fixture_get_trains_next() -> list[TrainStop]: modified_time=datetime(2023, 5, 1, 12, 0, tzinfo=dt_util.UTC), product_description=["Regionaltåg"], ) - depart3 = TrainStop( - id=15, + depart3 = TrainStopModel( + train_stop_id=15, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 17, 0, tzinfo=dt_util.UTC) + timedelta(minutes=30), @@ -155,11 +155,11 @@ def fixture_get_trains_next() -> list[TrainStop]: @pytest.fixture(name="get_train_stop") -def fixture_get_train_stop() -> TrainStop: +def fixture_get_train_stop() -> TrainStopModel: """Construct TrainStop Mock.""" - return TrainStop( - id=13, + return TrainStopModel( + train_stop_id=13, canceled=False, advertised_time_at_location=datetime(2023, 5, 1, 11, 0, tzinfo=dt_util.UTC), estimated_time_at_location=None, diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index a6ba82a85bc..400f396d355 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -12,7 +12,7 @@ from pytrafikverket.exceptions import ( NoTrainStationFound, UnknownError, ) -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from homeassistant import config_entries from homeassistant.components.trafikverket_train.const import ( @@ -479,8 +479,8 @@ async def test_reauth_flow_error_departures( async def test_options_flow( hass: HomeAssistant, - get_trains: list[TrainStop], - get_train_stop: TrainStop, + get_trains: list[TrainStopModel], + get_train_stop: TrainStopModel, ) -> None: """Test a reauthentication flow.""" entry = MockConfigEntry( diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index 329d8d716d0..06598297dd1 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -5,7 +5,7 @@ from __future__ import annotations from unittest.mock import patch from pytrafikverket.exceptions import InvalidAuthentication, NoTrainStationFound -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from syrupy.assertion import SnapshotAssertion from homeassistant.components.trafikverket_train.const import DOMAIN @@ -18,7 +18,9 @@ from . 
import ENTRY_CONFIG, OPTIONS_CONFIG from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> None: +async def test_unload_entry( + hass: HomeAssistant, get_trains: list[TrainStopModel] +) -> None: """Test unload an entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -52,7 +54,7 @@ async def test_unload_entry(hass: HomeAssistant, get_trains: list[TrainStop]) -> async def test_auth_failed( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test authentication failed.""" @@ -82,7 +84,7 @@ async def test_auth_failed( async def test_no_stations( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test stations are missing.""" @@ -108,7 +110,7 @@ async def test_no_stations( async def test_migrate_entity_unique_id( hass: HomeAssistant, - get_trains: list[TrainStop], + get_trains: list[TrainStopModel], snapshot: SnapshotAssertion, entity_registry: EntityRegistry, ) -> None: diff --git a/tests/components/trafikverket_train/test_sensor.py b/tests/components/trafikverket_train/test_sensor.py index f21561dd287..f4da3526cb2 100644 --- a/tests/components/trafikverket_train/test_sensor.py +++ b/tests/components/trafikverket_train/test_sensor.py @@ -8,7 +8,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest from pytrafikverket.exceptions import InvalidAuthentication, NoTrainAnnouncementFound -from pytrafikverket.trafikverket_train import TrainStop +from pytrafikverket.models import TrainStopModel from syrupy.assertion import SnapshotAssertion from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry @@ -23,8 +23,8 @@ async def test_sensor_next( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], - get_train_stop: TrainStop, + get_trains_next: list[TrainStopModel], + get_train_stop: TrainStopModel, snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor.""" @@ -70,7 +70,7 @@ async def test_sensor_single_stop( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor.""" @@ -86,7 +86,7 @@ async def test_sensor_update_auth_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with authentication update failure.""" @@ -119,7 +119,7 @@ async def test_sensor_update_failure( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure.""" @@ -149,7 +149,7 @@ async def test_sensor_update_failure_no_state( hass: HomeAssistant, freezer: FrozenDateTimeFactory, load_int: ConfigEntry, - get_trains_next: list[TrainStop], + get_trains_next: list[TrainStopModel], snapshot: SnapshotAssertion, ) -> None: """Test the Trafikverket Train sensor with update failure from empty state.""" diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index b99e6400273..1331f441940 100644 --- 
a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import Generator from http import HTTPStatus from pathlib import Path from typing import Any from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator import voluptuous as vol from homeassistant.components import media_source diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index b8abb086260..d9a4499f544 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -3,11 +3,11 @@ From http://doc.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures """ +from collections.abc import Generator from pathlib import Path from unittest.mock import MagicMock import pytest -from typing_extensions import Generator from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigFlow diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index e0354170b06..bf44f120134 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -47,15 +47,8 @@ ORIG_WRITE_TAGS = tts.SpeechManager.write_tags class DefaultEntity(tts.TextToSpeechEntity): """Test entity.""" - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return SUPPORT_LANGUAGES - - @property - def default_language(self) -> str: - """Return the default language.""" - return DEFAULT_LANG + _attr_supported_languages = SUPPORT_LANGUAGES + _attr_default_language = DEFAULT_LANG async def test_default_entity_attributes() -> None: @@ -523,10 +516,7 @@ class MockProviderWithDefaults(MockProvider): class MockEntityWithDefaults(MockTTSEntity): """Mock entity with default options.""" - @property - def default_options(self): - """Return a mapping with the default options.""" - return {"voice": "alex"} + _attr_default_options = {"voice": "alex"} @pytest.mark.parametrize( @@ -1054,9 +1044,7 @@ async def test_setup_legacy_cache_dir( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_setup(hass, mock_provider) await hass.services.async_call( @@ -1090,9 +1078,7 @@ async def test_setup_cache_dir( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_config_entry_setup(hass, mock_tts_entity) await hass.services.async_call( @@ -1195,9 +1181,7 @@ async def test_load_cache_legacy_retrieve_without_mem_cache( mock_tts_cache_dir / "42f18378fd4393d18c8dd11d03fa9563c1e54491_en_-_test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_setup(hass, mock_provider) client = await hass_client() @@ -1221,9 +1205,7 @@ async def test_load_cache_retrieve_without_mem_cache( "42f18378fd4393d18c8dd11d03fa9563c1e54491_en-us_-_tts.test.mp3" ) - with open(cache_file, "wb") as voice_file: - voice_file.write(tts_data) - + await hass.async_add_executor_job(Path(cache_file).write_bytes, tts_data) await mock_config_entry_setup(hass, mock_tts_entity) client = await hass_client() @@ -1766,3 
+1748,93 @@ async def test_async_convert_audio_error(hass: HomeAssistant) -> None: with pytest.raises(RuntimeError): # Simulate a bad WAV file await tts.async_convert_audio(hass, "wav", bytes(0), "mp3") + + +async def test_ttsentity_subclass_properties( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test for errors when subclasses of the TextToSpeechEntity are missing required properties.""" + + class TestClass1(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass1()) + + class TestClass2(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass2()) + + assert all(record.exc_info is None for record in caplog.records) + + caplog.clear() + + class TestClass3(tts.TextToSpeechEntity): + _attr_default_language = DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass3()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass4(tts.TextToSpeechEntity): + _attr_supported_languages = SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass4()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass5(tts.TextToSpeechEntity): + @property + def default_language(self) -> str: + return DEFAULT_LANG + + await mock_config_entry_setup(hass, TestClass5()) + + assert ( + "TTS entities must either set the '_attr_supported_languages' attribute or override the 'supported_languages' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) + caplog.clear() + + class TestClass6(tts.TextToSpeechEntity): + @property + def supported_languages(self) -> list[str]: + return SUPPORT_LANGUAGES + + await mock_config_entry_setup(hass, TestClass6()) + + assert ( + "TTS entities must either set the '_attr_default_language' attribute or override the 'default_language' property" + in [ + str(record.exc_info[1]) + for record in caplog.records + if record.exc_info is not None + ] + ) diff --git a/tests/components/tuya/conftest.py b/tests/components/tuya/conftest.py index 981e12ecceb..4fffb3ae389 100644 --- a/tests/components/tuya/conftest.py +++ b/tests/components/tuya/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations -from unittest.mock import AsyncMock, MagicMock, patch +from collections.abc import Generator +from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN @@ -35,7 +35,7 @@ def mock_config_entry() -> MockConfigEntry: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[None]: """Mock setting up a config entry.""" with patch("homeassistant.components.tuya.async_setup_entry", return_value=True): yield diff --git a/tests/components/twentemilieu/conftest.py b/tests/components/twentemilieu/conftest.py index 
7b157572824..7ecf1657ce9 100644 --- a/tests/components/twentemilieu/conftest.py +++ b/tests/components/twentemilieu/conftest.py @@ -2,12 +2,12 @@ from __future__ import annotations +from collections.abc import Generator from datetime import date from unittest.mock import MagicMock, patch import pytest from twentemilieu import WasteType -from typing_extensions import Generator from homeassistant.components.twentemilieu.const import ( CONF_HOUSE_LETTER, diff --git a/tests/components/twentemilieu/snapshots/test_calendar.ambr b/tests/components/twentemilieu/snapshots/test_calendar.ambr index e6de21fdca1..1df4beb4232 100644 --- a/tests/components/twentemilieu/snapshots/test_calendar.ambr +++ b/tests/components/twentemilieu/snapshots/test_calendar.ambr @@ -99,6 +99,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/twentemilieu/snapshots/test_sensor.ambr b/tests/components/twentemilieu/snapshots/test_sensor.ambr index 22dcb0331cd..86ffc171082 100644 --- a/tests/components/twentemilieu/snapshots/test_sensor.ambr +++ b/tests/components/twentemilieu/snapshots/test_sensor.ambr @@ -68,6 +68,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -146,6 +147,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -224,6 +226,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -302,6 +305,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , @@ -380,6 +384,7 @@ }), 'manufacturer': 'Twente Milieu', 'model': None, + 'model_id': None, 'name': 'Twente Milieu', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/twinkly/test_diagnostics.py b/tests/components/twinkly/test_diagnostics.py index 5cb9fc1fe9e..f9cf0bc562c 100644 --- a/tests/components/twinkly/test_diagnostics.py +++ b/tests/components/twinkly/test_diagnostics.py @@ -3,6 +3,7 @@ from collections.abc import Awaitable, Callable from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -26,4 +27,6 @@ async def test_diagnostics( await setup_integration() entry = hass.config_entries.async_entries(DOMAIN)[0] - assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot + assert await get_diagnostics_for_config_entry(hass, hass_client, entry) == snapshot( + exclude=props("created_at", "modified_at") + ) diff --git a/tests/components/twitch/__init__.py b/tests/components/twitch/__init__.py index 0238bbdadba..2d70aaf9649 100644 --- a/tests/components/twitch/__init__.py +++ b/tests/components/twitch/__init__.py @@ -1,10 +1,9 @@ """Tests for the Twitch component.""" -from collections.abc import AsyncIterator +from collections.abc import AsyncGenerator, AsyncIterator from typing import Any, Generic, TypeVar from twitchAPI.object.base import TwitchObject -from typing_extensions import AsyncGenerator from homeassistant.components.twitch import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/twitch/conftest.py b/tests/components/twitch/conftest.py index 6c243a8dbbf..25e443c2778 100644 --- 
a/tests/components/twitch/conftest.py +++ b/tests/components/twitch/conftest.py @@ -1,11 +1,11 @@ """Configure tests for the Twitch integration.""" +from collections.abc import Generator import time from unittest.mock import AsyncMock, patch import pytest from twitchAPI.object.api import FollowedChannel, Stream, TwitchUser, UserSubscription -from typing_extensions import Generator from homeassistant.components.application_credentials import ( ClientCredential, diff --git a/tests/components/twitch/fixtures/get_streams.json b/tests/components/twitch/fixtures/get_streams.json index 3714d97aaef..53330c9c82e 100644 --- a/tests/components/twitch/fixtures/get_streams.json +++ b/tests/components/twitch/fixtures/get_streams.json @@ -2,6 +2,7 @@ { "game_name": "Good game", "title": "Title", - "thumbnail_url": "stream-medium.png" + "thumbnail_url": "stream-medium.png", + "started_at": "2021-03-10T03:18:11Z" } ] diff --git a/tests/components/twitch/test_sensor.py b/tests/components/twitch/test_sensor.py index e5cddf8e192..8ce146adf07 100644 --- a/tests/components/twitch/test_sensor.py +++ b/tests/components/twitch/test_sensor.py @@ -3,6 +3,7 @@ from datetime import datetime from unittest.mock import AsyncMock +from dateutil.tz import tzutc from twitchAPI.object.api import FollowedChannel, Stream, UserSubscription from twitchAPI.type import TwitchResourceNotFound @@ -41,6 +42,9 @@ async def test_streaming( assert sensor_state.attributes["entity_picture"] == "stream-medium.png" assert sensor_state.attributes["game"] == "Good game" assert sensor_state.attributes["title"] == "Title" + assert sensor_state.attributes["started_at"] == datetime( + year=2021, month=3, day=10, hour=3, minute=18, second=11, tzinfo=tzutc() + ) async def test_oauth_without_sub_and_follow( diff --git a/tests/components/ukraine_alarm/test_config_flow.py b/tests/components/ukraine_alarm/test_config_flow.py index 58b5dde2bac..de9bdd618de 100644 --- a/tests/components/ukraine_alarm/test_config_flow.py +++ b/tests/components/ukraine_alarm/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Ukraine Alarm config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from aiohttp import ClientConnectionError, ClientError, ClientResponseError, RequestInfo import pytest -from typing_extensions import Generator from yarl import URL from homeassistant import config_entries diff --git a/tests/components/unifi/conftest.py b/tests/components/unifi/conftest.py index 4a7d86eea38..798b613b18d 100644 --- a/tests/components/unifi/conftest.py +++ b/tests/components/unifi/conftest.py @@ -3,21 +3,19 @@ from __future__ import annotations import asyncio -from collections.abc import Callable +from collections.abc import Callable, Coroutine, Generator from datetime import timedelta from types import MappingProxyType -from typing import Any +from typing import Any, Protocol from unittest.mock import AsyncMock, patch from aiounifi.models.message import MessageKey import orjson import pytest -from typing_extensions import Generator from homeassistant.components.unifi import STORAGE_KEY, STORAGE_VERSION from homeassistant.components.unifi.const import CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.hub.websocket import RETRY_TIMER -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -53,6 +51,20 @@ CONTROLLER_HOST = { "uptime": 1562600160, } +type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] + + +class 
WebsocketMessageMock(Protocol): + """Fixture to mock websocket message.""" + + def __call__( + self, + *, + message: MessageKey | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, + ) -> None: + """Send websocket message.""" + @pytest.fixture(autouse=True, name="mock_discovery") def fixture_discovery(): @@ -97,7 +109,7 @@ def fixture_config_entry( hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], -) -> ConfigEntry: +) -> MockConfigEntry: """Define a config entry fixture.""" config_entry = MockConfigEntry( domain=UNIFI_DOMAIN, @@ -161,6 +173,7 @@ def fixture_request( dpi_app_payload: list[dict[str, Any]], dpi_group_payload: list[dict[str, Any]], port_forward_payload: list[dict[str, Any]], + traffic_rule_payload: list[dict[str, Any]], site_payload: list[dict[str, Any]], system_information_payload: list[dict[str, Any]], wlan_payload: list[dict[str, Any]], @@ -171,9 +184,16 @@ def fixture_request( url = f"https://{host}:{DEFAULT_PORT}" def mock_get_request(path: str, payload: list[dict[str, Any]]) -> None: + # APIV2 request responses have `meta` and `data` automatically appended + json = {} + if path.startswith("/v2"): + json = payload + else: + json = {"meta": {"rc": "OK"}, "data": payload} + aioclient_mock.get( f"{url}{path}", - json={"meta": {"rc": "OK"}, "data": payload}, + json=json, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -183,6 +203,7 @@ def fixture_request( json={"data": "login successful", "meta": {"rc": "ok"}}, headers={"content-type": CONTENT_TYPE_JSON}, ) + mock_get_request("/api/self/sites", site_payload) mock_get_request(f"/api/s/{site_id}/stat/sta", client_payload) mock_get_request(f"/api/s/{site_id}/rest/user", clients_all_payload) @@ -192,6 +213,7 @@ def fixture_request( mock_get_request(f"/api/s/{site_id}/rest/portforward", port_forward_payload) mock_get_request(f"/api/s/{site_id}/stat/sysinfo", system_information_payload) mock_get_request(f"/api/s/{site_id}/rest/wlanconf", wlan_payload) + mock_get_request(f"/v2/api/site/{site_id}/trafficrules", traffic_rule_payload) return __mock_requests @@ -263,6 +285,12 @@ def fixture_system_information_data() -> list[dict[str, Any]]: ] + +@pytest.fixture(name="traffic_rule_payload") +def traffic_rule_payload_data() -> list[dict[str, Any]]: + """Traffic rule data.""" + return [] + + @pytest.fixture(name="wlan_payload") def fixture_wlan_data() -> list[dict[str, Any]]: """WLAN data.""" @@ -280,12 +308,12 @@ def fixture_default_requests( @pytest.fixture(name="config_entry_factory") async def fixture_config_entry_factory( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: MockConfigEntry, mock_requests: Callable[[str, str], None], -) -> Callable[[], ConfigEntry]: +) -> ConfigEntryFactoryType: """Fixture factory that can set up UniFi network integration.""" - async def __mock_setup_config_entry() -> ConfigEntry: + async def __mock_setup_config_entry() -> MockConfigEntry: mock_requests(config_entry.data[CONF_HOST], config_entry.data[CONF_SITE_ID]) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -296,8 +324,8 @@ async def fixture_config_entry_factory( @pytest.fixture(name="config_entry_setup") async def fixture_config_entry_setup( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] -) -> ConfigEntry: + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: """Fixture providing a set up instance of UniFi network integration.""" return await 
config_entry_factory() @@ -367,13 +395,15 @@ def fixture_aiounifi_websocket_state( @pytest.fixture(name="mock_websocket_message") -def fixture_aiounifi_websocket_message(_mock_websocket: AsyncMock): +def fixture_aiounifi_websocket_message( + _mock_websocket: AsyncMock, +) -> WebsocketMessageMock: """No real websocket allowed.""" def make_websocket_call( *, message: MessageKey | None = None, - data: list[dict] | dict | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, ) -> None: """Generate a websocket call.""" message_handler = _mock_websocket.call_args[0][0] diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr new file mode 100644 index 00000000000..51a37620268 --- /dev/null +++ b/tests/components/unifi/snapshots/test_button.ambr @@ -0,0 +1,236 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-00:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Restart', + }), + 'context': , + 'entity_id': 'button.switch_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.ssid_1_regenerate_password', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Regenerate Password', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'regenerate_password-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'SSID 1 Regenerate Password', + }), + 'context': , + 'entity_id': 'button.ssid_1_regenerate_password', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-00:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.switch_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.switch_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.switch_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'switch Restart', + }), + 'context': , + 'entity_id': 'button.switch_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/unifi/snapshots/test_device_tracker.ambr 
b/tests/components/unifi/snapshots/test_device_tracker.ambr new file mode 100644 index 00000000000..3debd512050 --- /dev/null +++ b/tests/components/unifi/snapshots/test_device_tracker.ambr @@ -0,0 +1,149 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.switch_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.switch_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch 1', + 'ip': '10.0.1.1', + 'mac': '00:00:00:00:01:01', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.switch_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'home', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.wd_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'wd_client_1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'site_id-00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.wd_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'wd_client_1', + 'host_name': 'wd_client_1', + 'mac': '00:00:00:00:00:02', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.wd_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'device_tracker', + 'entity_category': , + 'entity_id': 'device_tracker.ws_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'ws_client_1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'site_id-00:00:00:00:00:01', + 'unit_of_measurement': None, + 
}) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0-client_payload0][device_tracker.ws_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'ws_client_1', + 'host_name': 'ws_client_1', + 'ip': '10.0.0.1', + 'mac': '00:00:00:00:00:01', + 'source_type': , + }), + 'context': , + 'entity_id': 'device_tracker.ws_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'not_home', + }) +# --- diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index 83d76688ea3..e33ec678217 100644 --- a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -1,4 +1,52 @@ # serializer version: 1 +# name: test_entity_and_device_data[site_payload0-wlan_payload0][image.ssid_1_qr_code-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'image', + 'entity_category': , + 'entity_id': 'image.ssid_1_qr_code', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'QR Code', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'qr_code-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0][image.ssid_1_qr_code-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/image_proxy/image.ssid_1_qr_code?token=1', + 'friendly_name': 'SSID 1 QR Code', + }), + 'context': , + 'entity_id': 'image.ssid_1_qr_code', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:01:00+00:00', + }) +# --- # name: test_wlan_qr_code b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5, + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 1', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 
'latest_version': '4.3.17.11279', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][update.device_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 2', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.0.42.10433', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 1', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.3.17.11279', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.device_2', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'device_update-00:00:00:00:01:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload1-device_payload0][update.device_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/unifi/icon.png', + 'friendly_name': 'Device 2', + 'in_progress': False, + 'installed_version': '4.0.42.10433', + 'latest_version': '4.0.42.10433', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.device_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index b7bf19aedc2..fc3aeccea9f 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -7,23 +7,29 @@ from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion -from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.unifi.const import CONF_SITE_ID -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( - ATTR_DEVICE_CLASS, CONF_HOST, CONTENT_TYPE_JSON, STATE_UNAVAILABLE, - EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker RANDOM_TOKEN = "random_token" @@ -121,33 +127,44 @@ WLAN_REGENERATE_PASSWORD = [ ] -async def _test_button_entity( +@pytest.mark.parametrize("device_payload", [DEVICE_RESTART + DEVICE_POWER_CYCLE_POE]) +@pytest.mark.parametrize("wlan_payload", [WLAN_REGENERATE_PASSWORD]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.BUTTON]): + config_entry = await config_entry_factory() + if site_payload[0]["role"] == "admin": + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + else: + assert 
len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == 0 + + +async def _test_button_entity( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry: ConfigEntry, - entity_count: int, + mock_websocket_state: WebsocketStateManager, + config_entry: MockConfigEntry, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], call: dict[str, str], ) -> None: """Test button entity.""" - assert len(hass.states.async_entity_ids(BUTTON_DOMAIN)) == entity_count - - ent_reg_entry = entity_registry.async_get(entity_id) - assert ent_reg_entry.unique_id == unique_id - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - - # Validate state object - button = hass.states.get(entity_id) - assert button is not None - assert button.attributes.get(ATTR_DEVICE_CLASS) == device_class - # Send and validate device command aioclient_mock.clear_requests() aioclient_mock.request( @@ -177,10 +194,7 @@ async def _test_button_entity( @pytest.mark.parametrize( ( "device_payload", - "entity_count", "entity_id", - "unique_id", - "device_class", "request_method", "request_path", "call", @@ -188,10 +202,7 @@ async def _test_button_entity( [ ( DEVICE_RESTART, - 1, "button.switch_restart", - "device_restart-00:00:00:00:01:01", - ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -202,10 +213,7 @@ async def _test_button_entity( ), ( DEVICE_POWER_CYCLE_POE, - 2, "button.switch_port_1_power_cycle", - "power_cycle-00:00:00:00:01:01_1", - ButtonDeviceClass.RESTART, "post", "/cmd/devmgr", { @@ -218,14 +226,10 @@ async def _test_button_entity( ) async def test_device_button_entities( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, - entity_count: int, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, call: dict[str, str], @@ -233,14 +237,10 @@ async def test_device_button_entities( """Test button entities based on device sources.""" await _test_button_entity( hass, - entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, - entity_count, entity_id, - unique_id, - device_class, request_method, request_path, {}, @@ -251,10 +251,7 @@ async def test_device_button_entities( @pytest.mark.parametrize( ( "wlan_payload", - "entity_count", "entity_id", - "unique_id", - "device_class", "request_method", "request_path", "request_data", @@ -263,10 +260,7 @@ async def test_device_button_entities( [ ( WLAN_REGENERATE_PASSWORD, - 1, "button.ssid_1_regenerate_password", - "regenerate_password-012345678910111213141516", - ButtonDeviceClass.UPDATE, "put", f"/rest/wlanconf/{WLAN_REGENERATE_PASSWORD[0]["_id"]}", { @@ -281,12 +275,9 @@ async def test_wlan_button_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, - entity_count: int, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, - unique_id: str, - device_class: ButtonDeviceClass, request_method: str, request_path: str, request_data: dict[str, Any], @@ -308,14 +299,10 @@ async def test_wlan_button_entities( await _test_button_entity( hass, - entity_registry, aioclient_mock, mock_websocket_state, config_entry_setup, - entity_count, entity_id, - 
unique_id, - device_class, request_method, request_path, request_data, @@ -327,7 +314,7 @@ async def test_wlan_button_entities( @pytest.mark.usefixtures("config_entry_setup") async def test_power_cycle_availability( hass: HomeAssistant, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: dict[str, Any], ) -> None: """Verify that disabling PoE marks entity as unavailable.""" diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 9ae3af19b46..1d745511dc5 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -1,6 +1,5 @@ """Test UniFi Network config flow.""" -from collections.abc import Callable import socket from unittest.mock import PropertyMock, patch @@ -25,7 +24,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -36,8 +35,9 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import ConfigEntryFactoryType + from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker CLIENTS = [{"mac": "00:00:00:00:00:01"}] @@ -137,9 +137,7 @@ async def test_flow_works(hass: HomeAssistant, mock_discovery) -> None: } -async def test_flow_works_negative_discovery( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: +async def test_flow_works_negative_discovery(hass: HomeAssistant) -> None: """Test config flow with a negative outcome of async_discovery_unifi.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -299,7 +297,7 @@ async def test_flow_fails_hub_unavailable(hass: HomeAssistant) -> None: async def test_reauth_flow_update_configuration( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Verify reauth flow can update hub configuration.""" config_entry = config_entry_setup @@ -340,7 +338,7 @@ async def test_reauth_flow_update_configuration( async def test_reauth_flow_update_configuration_on_not_loaded_entry( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Verify reauth flow can update hub configuration on a not loaded entry.""" with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): @@ -382,7 +380,7 @@ async def test_reauth_flow_update_configuration_on_not_loaded_entry( @pytest.mark.parametrize("wlan_payload", [WLANS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_advanced_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test advanced config flow options.""" config_entry = config_entry_setup @@ -466,7 +464,7 @@ async def test_advanced_option_flow( @pytest.mark.parametrize("client_payload", [CLIENTS]) async def test_simple_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test simple config flow options.""" config_entry = config_entry_setup @@ -535,9 +533,8 @@ async def 
test_form_ssdp(hass: HomeAssistant) -> None: } -async def test_form_ssdp_aborts_if_host_already_exists( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: +@pytest.mark.usefixtures("config_entry") +async def test_form_ssdp_aborts_if_host_already_exists(hass: HomeAssistant) -> None: """Test we abort if the host is already configured.""" result = await hass.config_entries.flow.async_init( UNIFI_DOMAIN, @@ -557,9 +554,8 @@ async def test_form_ssdp_aborts_if_host_already_exists( assert result["reason"] == "already_configured" -async def test_form_ssdp_aborts_if_serial_already_exists( - hass: HomeAssistant, config_entry: ConfigEntry -) -> None: +@pytest.mark.usefixtures("config_entry") +async def test_form_ssdp_aborts_if_serial_already_exists(hass: HomeAssistant) -> None: """Test we abort if the serial is already configured.""" result = await hass.config_entries.flow.async_init( diff --git a/tests/components/unifi/test_device_tracker.py b/tests/components/unifi/test_device_tracker.py index 984fe50753f..c653370656d 100644 --- a/tests/components/unifi/test_device_tracker.py +++ b/tests/components/unifi/test_device_tracker.py @@ -1,20 +1,20 @@ """The tests for the UniFi Network device tracker platform.""" -from collections.abc import Callable from datetime import timedelta from types import MappingProxyType from typing import Any +from unittest.mock import patch from aiounifi.models.event import EventKey from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest +from syrupy import SnapshotAssertion from homeassistant.components.device_tracker import DOMAIN as TRACKER_DOMAIN from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, CONF_CLIENT_SOURCE, - CONF_DETECTION_TIME, CONF_IGNORE_WIRED_BUG, CONF_SSID_FILTER, CONF_TRACK_CLIENTS, @@ -23,13 +23,18 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform WIRED_CLIENT_1 = { "hostname": "wd_client_1", @@ -85,6 +90,25 @@ SWITCH_1 = { } +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT_1, WIRELESS_CLIENT_1]]) +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("mock_device_registry") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.DEVICE_TRACKER]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + @pytest.mark.parametrize( "client_payload", [[WIRELESS_CLIENT_1, WIRED_BUG_CLIENT, UNSEEN_CLIENT]] ) 
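The UniFi test refactor in this diff leans on three pieces introduced in `tests/components/unifi/conftest.py`: the `ConfigEntryFactoryType` alias for the setup factory, the `WebsocketMessageMock` protocol that types the keyword-only websocket message fixture, and `snapshot_platform` from `tests.common` for registry and state snapshots. The sketch below only restates how those pieces fit together, following the pattern already used in this diff; it is not part of the patch itself, and the platform constant and websocket payload are illustrative stand-ins.

```python
from unittest.mock import patch

from aiounifi.models.message import MessageKey
from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import ConfigEntryFactoryType, WebsocketMessageMock

from tests.common import snapshot_platform


async def test_platform_snapshot_and_ws_update(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    config_entry_factory: ConfigEntryFactoryType,
    mock_websocket_message: WebsocketMessageMock,
    snapshot: SnapshotAssertion,
) -> None:
    """Illustrative sketch: snapshot one platform, then push a websocket update."""
    # Limit setup to a single platform so only its entities land in the snapshot.
    with patch("homeassistant.components.unifi.PLATFORMS", [Platform.DEVICE_TRACKER]):
        config_entry = await config_entry_factory()

    # Compare every entity registry entry and state against the stored .ambr snapshot.
    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)

    # The protocol-typed fixture documents the keyword-only call signature.
    mock_websocket_message(
        message=MessageKey.CLIENT,
        data={"mac": "00:00:00:00:00:01", "last_seen": 1562600145},  # example payload
    )
    await hass.async_block_till_done()
```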
@@ -92,8 +116,8 @@ SWITCH_1 = { @pytest.mark.usefixtures("mock_device_registry") async def test_client_state_update( hass: HomeAssistant, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Verify tracking of wireless clients.""" @@ -145,7 +169,7 @@ async def test_client_state_update( async def test_client_state_from_event_source( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, client_payload: list[dict[str, Any]], ) -> None: """Verify update state of client based on event source.""" @@ -213,67 +237,40 @@ async def test_client_state_from_event_source( assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) +@pytest.mark.usefixtures("mock_device_registry") @pytest.mark.parametrize( - "device_payload", + ("state", "interval", "expected"), [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device 1", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "version": "4.0.42.10433", - }, - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "ip": "10.0.1.2", - "mac": "00:00:00:00:01:02", - "model": "US16P150", - "name": "Device 2", - "next_interval": 20, - "state": 0, - "type": "usw", - "version": "4.0.42.10433", - }, - ] + # Start home, new signal but still home, heartbeat timer triggers away + (1, 20, (STATE_HOME, STATE_HOME, STATE_NOT_HOME)), + # Start away, new signal but still home, heartbeat time do not trigger + (0, 40, (STATE_NOT_HOME, STATE_HOME, STATE_HOME)), ], ) -@pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("mock_device_registry") -async def test_tracked_devices( +async def test_tracked_device_state_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], + state: int, + interval: int, + expected: list[str], ) -> None: """Test the update_items function with some devices.""" - assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.device_1").state == STATE_HOME - assert hass.states.get("device_tracker.device_2").state == STATE_NOT_HOME + device_payload[0] = device_payload[0] | {"state": state} + await config_entry_factory() + assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 + assert hass.states.get("device_tracker.switch_1").state == expected[0] # State change signalling work - device_1 = device_payload[0] - device_1["next_interval"] = 20 - device_2 = device_payload[1] - device_2["state"] = 1 - device_2["next_interval"] = 50 - mock_websocket_message(message=MessageKey.DEVICE, data=[device_1, device_2]) + switch_1 = device_payload[0] | {"state": 1, "next_interval": interval} + mock_websocket_message(message=MessageKey.DEVICE, data=[switch_1]) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_HOME - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + # Too little time has passed + assert 
hass.states.get("device_tracker.switch_1").state == expected[1] # Change of time can mark device not_home outside of expected reporting interval new_time = dt_util.utcnow() + timedelta(seconds=90) @@ -281,23 +278,24 @@ async def test_tracked_devices( async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + # Heartbeat to update state is interval + 60 seconds + assert hass.states.get("device_tracker.switch_1").state == expected[2] # Disabled device is unavailable - device_1["disabled"] = True - mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + switch_1["disabled"] = True + mock_websocket_message(message=MessageKey.DEVICE, data=switch_1) await hass.async_block_till_done() - assert hass.states.get("device_tracker.device_1").state == STATE_UNAVAILABLE - assert hass.states.get("device_tracker.device_2").state == STATE_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_UNAVAILABLE @pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1, WIRED_CLIENT_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") async def test_remove_clients( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Test the remove_items function with some clients.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 @@ -313,68 +311,35 @@ async def test_remove_clients( assert hass.states.get("device_tracker.wd_client_1") -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "essid": "ssid", - "hostname": "client", - "is_wired": False, - "last_seen": 1562600145, - "mac": "00:00:00:00:00:01", - } - ] - ], -) -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "version": "4.0.42.10433", - } - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRELESS_CLIENT_1]]) +@pytest.mark.parametrize("device_payload", [[SWITCH_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, + mock_websocket_state: WebsocketStateManager, +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_HOME # Controller unavailable await mock_websocket_state.disconnect() - assert hass.states.get("device_tracker.client").state == STATE_UNAVAILABLE - assert hass.states.get("device_tracker.device").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.ws_client_1").state == STATE_UNAVAILABLE + assert hass.states.get("device_tracker.switch_1").state == 
STATE_UNAVAILABLE # Controller available await mock_websocket_state.reconnect() - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME - assert hass.states.get("device_tracker.device").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.switch_1").state == STATE_HOME @pytest.mark.usefixtures("mock_device_registry") async def test_option_ssid_filter( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the SSID filter works. @@ -383,13 +348,7 @@ async def test_option_ssid_filter( Client on SSID2 will be removed on change of options. """ client_payload += [ - { - "essid": "ssid", - "hostname": "client", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - }, + WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())}, { "essid": "ssid2", "hostname": "client_on_ssid2", @@ -401,7 +360,7 @@ async def test_option_ssid_filter( config_entry = await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_NOT_HOME # Setting SSID filter will remove clients outside of filter @@ -411,33 +370,29 @@ async def test_option_ssid_filter( await hass.async_block_till_done() # Not affected by SSID filter - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Removed due to SSID filter assert not hass.states.get("device_tracker.client_on_ssid2") # Roams to SSID outside of filter - client = client_payload[0] - client["essid"] = "other_ssid" - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] | {"essid": "other_ssid"} + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) # Data update while SSID filter is in effect shouldn't create the client - client_on_ssid2 = client_payload[1] - client_on_ssid2["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) + client_on_ssid2 = client_payload[1] | { + "last_seen": dt_util.as_timestamp(dt_util.utcnow()) + } mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time = dt_util.utcnow() + timedelta( - seconds=( - config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 - ) - ) + new_time = dt_util.utcnow() + timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # SSID filter marks client as away - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # SSID still outside of filter assert not hass.states.get("device_tracker.client_on_ssid2") @@ -446,25 +401,23 @@ async def test_option_ssid_filter( hass.config_entries.async_update_entry(config_entry, options={CONF_SSID_FILTER: []}) await hass.async_block_till_done() - client["last_seen"] += 1 + ws_client_1["last_seen"] += 1 client_on_ssid2["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=[client, client_on_ssid2]) + 
mock_websocket_message( + message=MessageKey.CLIENT, data=[ws_client_1, client_on_ssid2] + ) await hass.async_block_till_done() - assert hass.states.get("device_tracker.client").state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME assert hass.states.get("device_tracker.client_on_ssid2").state == STATE_HOME # Time pass to mark client as away - new_time += timedelta( - seconds=( - config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) + 1 - ) - ) + new_time += timedelta(seconds=(DEFAULT_DETECTION_TIME + 1)) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() - assert hass.states.get("device_tracker.client").state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME client_on_ssid2["last_seen"] += 1 mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) @@ -478,9 +431,7 @@ async def test_option_ssid_filter( mock_websocket_message(message=MessageKey.CLIENT, data=client_on_ssid2) await hass.async_block_till_done() - new_time += timedelta( - seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) - ) + new_time += timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() @@ -492,7 +443,7 @@ async def test_option_ssid_filter( async def test_wireless_client_go_wired_issue( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the solution to catch wireless device go wired UniFi issue. @@ -500,64 +451,51 @@ async def test_wireless_client_go_wired_issue( UniFi Network has a known issue that when a wireless device goes away it sometimes gets marked as wired. 
""" client_payload.append( - { - "essid": "ssid", - "hostname": "client", - "ip": "10.0.0.1", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - } + WIRELESS_CLIENT_1 | {"last_seen": dt_util.as_timestamp(dt_util.utcnow())} ) - config_entry = await config_entry_factory() + await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Trigger wired bug - client = client_payload[0] - client["last_seen"] = dt_util.as_timestamp(dt_util.utcnow()) - client["is_wired"] = True - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] | { + "last_seen": dt_util.as_timestamp(dt_util.utcnow()), + "is_wired": True, + } + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Wired bug fix keeps client marked as wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Pass time - new_time = dt_util.utcnow() + timedelta( - seconds=(config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME)) - ) + new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Marked as home according to the timer - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Try to mark client as connected - client["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Make sure it don't go online again until wired bug disappears - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Make client wireless - client["last_seen"] += 1 - client["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + ws_client_1["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Client is no longer affected by wired bug and can be marked online - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME @pytest.mark.parametrize("config_entry_options", [{CONF_IGNORE_WIRED_BUG: True}]) @@ -565,69 +503,54 @@ async def test_wireless_client_go_wired_issue( async def test_option_ignore_wired_bug( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test option to ignore wired bug.""" client_payload.append( - { - "ap_mac": "00:00:00:00:02:01", - "essid": "ssid", - "hostname": "client", - "ip": "10.0.0.1", - "is_wired": False, - "last_seen": dt_util.as_timestamp(dt_util.utcnow()), - "mac": "00:00:00:00:00:01", - } + WIRELESS_CLIENT_1 | 
{"last_seen": dt_util.as_timestamp(dt_util.utcnow())} ) - config_entry = await config_entry_factory() + await config_entry_factory() assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 1 # Client is wireless - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Trigger wired bug - client = client_payload[0] - client["is_wired"] = True - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1 = client_payload[0] + ws_client_1["is_wired"] = True + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Wired bug in effect - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME - # pass time - new_time = dt_util.utcnow() + timedelta( - seconds=config_entry.options.get(CONF_DETECTION_TIME, DEFAULT_DETECTION_TIME) - ) + # Pass time + new_time = dt_util.utcnow() + timedelta(seconds=DEFAULT_DETECTION_TIME) with freeze_time(new_time): async_fire_time_changed(hass, new_time) await hass.async_block_till_done() # Timer marks client as away - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_NOT_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME # Mark client as connected again - client["last_seen"] += 1 - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Ignoring wired bug allows client to go home again even while affected - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME # Make client wireless - client["last_seen"] += 1 - client["is_wired"] = False - mock_websocket_message(message=MessageKey.CLIENT, data=client) + ws_client_1["last_seen"] += 1 + ws_client_1["is_wired"] = False + mock_websocket_message(message=MessageKey.CLIENT, data=ws_client_1) await hass.async_block_till_done() # Client is wireless and still connected - client_state = hass.states.get("device_tracker.client") - assert client_state.state == STATE_HOME + assert hass.states.get("device_tracker.ws_client_1").state == STATE_HOME @pytest.mark.parametrize( @@ -657,8 +580,8 @@ async def test_option_ignore_wired_bug( async def test_restoring_client( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry: ConfigEntry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry: MockConfigEntry, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], clients_all_payload: list[dict[str, Any]], ) -> None: @@ -731,10 +654,10 @@ async def test_restoring_client( @pytest.mark.usefixtures("mock_device_registry") async def test_config_entry_options_track( hass: HomeAssistant, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], counts: tuple[int], - expected: dict[tuple[bool | None]], + expected: tuple[tuple[bool | None, ...], ...], ) -> None: """Test the different config entry options. 
diff --git a/tests/components/unifi/test_diagnostics.py b/tests/components/unifi/test_diagnostics.py index fcaba59cbad..80359a9c75c 100644 --- a/tests/components/unifi/test_diagnostics.py +++ b/tests/components/unifi/test_diagnostics.py @@ -2,15 +2,16 @@ import pytest from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -121,11 +122,10 @@ DPI_GROUP_DATA = [ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, config_entry_setup) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, config_entry_setup + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/unifi/test_hub.py b/tests/components/unifi/test_hub.py index 0d75a83c5f5..af134c7449b 100644 --- a/tests/components/unifi/test_hub.py +++ b/tests/components/unifi/test_hub.py @@ -1,6 +1,5 @@ """Test UniFi Network.""" -from collections.abc import Callable from http import HTTPStatus from types import MappingProxyType from typing import Any @@ -12,18 +11,21 @@ import pytest from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.components.unifi.hub import get_unifi_api -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util +from .conftest import ConfigEntryFactoryType, WebsocketStateManager + +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker async def test_hub_setup( device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Successful setup.""" with patch( @@ -54,7 +56,7 @@ async def test_hub_setup( async def test_reset_after_successful_setup( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset when the entry has been setup.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -64,7 +66,7 @@ async def test_reset_after_successful_setup( async def test_reset_fails( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset when the entry has been setup can return false.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -80,8 +82,8 @@ async def test_reset_fails( @pytest.mark.usefixtures("mock_device_registry") async def test_connection_state_signalling( hass: HomeAssistant, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: 
ConfigEntryFactoryType, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify connection statesignalling and connection state are working.""" @@ -110,8 +112,8 @@ async def test_connection_state_signalling( async def test_reconnect_mechanism( aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, ) -> None: """Verify reconnect prints only on first reconnection try.""" aioclient_mock.clear_requests() @@ -140,7 +142,10 @@ async def test_reconnect_mechanism( ], ) @pytest.mark.usefixtures("config_entry_setup") -async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) -> None: +async def test_reconnect_mechanism_exceptions( + mock_websocket_state: WebsocketStateManager, + exception: Exception, +) -> None: """Verify async_reconnect calls expected methods.""" with ( patch("aiounifi.Controller.login", side_effect=exception), @@ -170,8 +175,8 @@ async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) - ) async def test_get_unifi_api_fails_to_connect( hass: HomeAssistant, - side_effect, - raised_exception, + side_effect: Exception, + raised_exception: Exception, config_entry_data: MappingProxyType[str, Any], ) -> None: """Check that get_unifi_api can handle UniFi Network being unavailable.""" diff --git a/tests/components/unifi/test_image.py b/tests/components/unifi/test_image.py index 75d2f02900d..dc37d7cb8b7 100644 --- a/tests/components/unifi/test_image.py +++ b/tests/components/unifi/test_image.py @@ -3,22 +3,41 @@ from copy import deepcopy from datetime import timedelta from http import HTTPStatus +from typing import Any +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import STATE_UNAVAILABLE, EntityCategory +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator + +@pytest.fixture(autouse=True) +def mock_getrandbits(): + """Mock image access token which normally is randomized.""" + with patch( + "homeassistant.components.image.SystemRandom.getrandbits", + return_value=1, + ): + yield + + WLAN = { "_id": "012345678910111213141516", "bc_filter_enabled": False, @@ -56,6 +75,32 @@ WLAN = { } +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2021-01-01 01:01:00") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + 
site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.IMAGE]): + config_entry = await config_entry_factory() + if site_payload[0]["role"] == "admin": + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + else: + assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 + + @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") async def test_wlan_qr_code( @@ -63,16 +108,13 @@ async def test_wlan_qr_code( entity_registry: er.EntityRegistry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, - mock_websocket_message, - mock_websocket_state, + mock_websocket_message: WebsocketMessageMock, ) -> None: """Test the update_clients function when no clients are found.""" assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 ent_reg_entry = entity_registry.async_get("image.ssid_1_qr_code") - assert ent_reg_entry.unique_id == "qr_code-012345678910111213141516" assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -84,10 +126,6 @@ async def test_wlan_qr_code( ) await hass.async_block_till_done() - # Validate state object - image_state_1 = hass.states.get("image.ssid_1_qr_code") - assert image_state_1.name == "SSID 1 QR Code" - # Validate image client = await hass_client() resp = await client.get("/api/image_proxy/image.ssid_1_qr_code") @@ -96,8 +134,8 @@ async def test_wlan_qr_code( assert body == snapshot # Update state object - same password - no change to state + image_state_1 = hass.states.get("image.ssid_1_qr_code") mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=WLAN) - await hass.async_block_till_done() image_state_2 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state == image_state_2.state @@ -105,7 +143,6 @@ async def test_wlan_qr_code( data = deepcopy(WLAN) data["x_passphrase"] = "new password" mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=data) - await hass.async_block_till_done() image_state_3 = hass.states.get("image.ssid_1_qr_code") assert image_state_1.state != image_state_3.state @@ -116,25 +153,41 @@ async def test_wlan_qr_code( body = await resp.read() assert body == snapshot - # Availability signalling - # Controller disconnects +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: + """Verify entities state reflect on hub becoming unavailable.""" + assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + + # Controller unavailable await mock_websocket_state.disconnect() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE - # Controller reconnects + # Controller available await mock_websocket_state.reconnect() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("config_entry_setup") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_source_availability( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: + """Verify entities 
state reflect on source becoming unavailable.""" + assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE + # WLAN gets disabled wlan_1 = deepcopy(WLAN) wlan_1["enabled"] = False mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state == STATE_UNAVAILABLE # WLAN gets re-enabled wlan_1["enabled"] = True mock_websocket_message(message=MessageKey.WLAN_CONF_UPDATED, data=wlan_1) - await hass.async_block_till_done() assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_init.py b/tests/components/unifi/test_init.py index 7cd203ab8fd..68f80555cd6 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -1,6 +1,5 @@ """Test UniFi Network integration setup process.""" -from collections.abc import Callable from typing import Any from unittest.mock import patch @@ -13,29 +12,25 @@ from homeassistant.components.unifi.const import ( CONF_ALLOW_UPTIME_SENSORS, CONF_TRACK_CLIENTS, CONF_TRACK_DEVICES, - DOMAIN as UNIFI_DOMAIN, ) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import DEFAULT_CONFIG_ENTRY_ID +from .conftest import ( + DEFAULT_CONFIG_ENTRY_ID, + ConfigEntryFactoryType, + WebsocketMessageMock, +) from tests.common import flush_store -from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -async def test_setup_with_no_config(hass: HomeAssistant) -> None: - """Test that we do not discover anything or try to set up a hub.""" - assert await async_setup_component(hass, UNIFI_DOMAIN, {}) is True - assert UNIFI_DOMAIN not in hass.data - - async def test_setup_entry_fails_config_entry_not_ready( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with patch( @@ -48,7 +43,7 @@ async def test_setup_entry_fails_config_entry_not_ready( async def test_setup_entry_fails_trigger_reauth_flow( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with ( @@ -86,7 +81,7 @@ async def test_setup_entry_fails_trigger_reauth_flow( async def test_wireless_clients( hass: HomeAssistant, hass_storage: dict[str, Any], - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Verify wireless clients class.""" hass_storage[unifi.STORAGE_KEY] = { @@ -170,13 +165,11 @@ async def test_wireless_clients( ) async def test_remove_config_entry_device( hass: HomeAssistant, - hass_storage: dict[str, Any], - aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], device_payload: list[dict[str, Any]], - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, hass_ws_client: WebSocketGenerator, ) -> None: 
"""Verify removing a device manually.""" diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 48e524aef76..5af4b297847 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1,6 +1,5 @@ """UniFi Network sensor platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import datetime, timedelta from types import MappingProxyType @@ -29,7 +28,7 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DEVICE_STATES, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -42,7 +41,13 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed DEVICE_1 = { "board_rev": 2, @@ -362,9 +367,9 @@ async def test_no_clients(hass: HomeAssistant) -> None: ) async def test_bandwidth_sensors( hass: HomeAssistant, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, config_entry_options: MappingProxyType[str, Any], - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify that bandwidth sensors are working as expected.""" @@ -459,115 +464,6 @@ async def test_bandwidth_sensors( assert hass.states.get("sensor.wired_client_tx") -@pytest.mark.parametrize( - "config_entry_options", - [ - { - CONF_ALLOW_BANDWIDTH_SENSORS: False, - CONF_ALLOW_UPTIME_SENSORS: True, - CONF_TRACK_CLIENTS: False, - CONF_TRACK_DEVICES: False, - } - ], -) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "mac": "00:00:00:00:00:01", - "name": "client1", - "oui": "Producer", - "uptime": 0, - } - ] - ], -) -@pytest.mark.parametrize( - ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"), - [ - # Uptime listed in epoch time should never change - (1609462800, 1609462800, 1609462800, 1612141200), - # Uptime counted in seconds increases with every event - (60, 240, 480, 60), - ], -) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_uptime_sensors( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - freezer: FrozenDateTimeFactory, - mock_websocket_message, - config_entry_options: MappingProxyType[str, Any], - config_entry_factory: Callable[[], ConfigEntry], - client_payload: list[dict[str, Any]], - initial_uptime, - event_uptime, - small_variation_uptime, - new_uptime, -) -> None: - """Verify that uptime sensors are working as expected.""" - uptime_client = client_payload[0] - uptime_client["uptime"] = initial_uptime - freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) - config_entry = await config_entry_factory() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" - assert ( - entity_registry.async_get("sensor.client1_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - - # Verify normal new event doesn't change uptime - # 4 minutes have passed - uptime_client["uptime"] = event_uptime - now = datetime(2021, 1, 1, 
1, 4, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) - await hass.async_block_till_done() - - assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify small variation of uptime (<120 seconds) is ignored - # 15 seconds variation after 8 minutes - uptime_client["uptime"] = small_variation_uptime - now = datetime(2021, 1, 1, 1, 8, 15, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) - - assert hass.states.get("sensor.client1_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify new event change uptime - # 1 month has passed - uptime_client["uptime"] = new_uptime - now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.CLIENT, data=uptime_client) - await hass.async_block_till_done() - - assert hass.states.get("sensor.client1_uptime").state == "2021-02-01T01:00:00+00:00" - - # Disable option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = False - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 0 - assert hass.states.get("sensor.client1_uptime") is None - - # Enable option - options = deepcopy(config_entry_options) - options[CONF_ALLOW_UPTIME_SENSORS] = True - with patch("homeassistant.util.dt.now", return_value=now): - hass.config_entries.async_update_entry(config_entry, options=options) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - assert hass.states.get("sensor.client1_uptime") - - @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], @@ -600,7 +496,9 @@ async def test_uptime_sensors( @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_remove_sensors( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Verify removing of clients work as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 @@ -629,8 +527,8 @@ async def test_remove_sensors( async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - mock_websocket_state, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 @@ -703,9 +601,9 @@ async def test_poe_port_switches( async def test_wlan_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify that WLAN client sensors are working as expected.""" @@ -845,13 +743,13 @@ async def test_wlan_client_sensors( async def test_outlet_power_readings( hass: HomeAssistant, 
entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], entity_id: str, expected_unique_id: str, - expected_value: any, + expected_value: Any, changed_data: dict | None, - expected_update_value: any, + expected_update_value: Any, ) -> None: """Test the outlet power reporting on PDU devices.""" assert len(hass.states.async_all()) == 13 @@ -876,81 +774,6 @@ async def test_outlet_power_readings( assert sensor_data.state == expected_update_value -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - } - ] - ], -) -async def test_device_uptime( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], - device_payload: list[dict[str, Any]], -) -> None: - """Verify that uptime sensors are working as expected.""" - now = datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - await config_entry_factory() - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" - - assert ( - entity_registry.async_get("sensor.device_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - - # Verify normal new event doesn't change uptime - # 4 minutes have passed - device = device_payload[0] - device["uptime"] = 240 - now = datetime(2021, 1, 1, 1, 4, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify small variation of uptime (<120 seconds) is ignored - # 15 seconds variation after 8 minutes - device = device_payload[0] - device["uptime"] = 480 - now = datetime(2021, 1, 1, 1, 8, 15, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get("sensor.device_uptime").state == "2021-01-01T01:00:00+00:00" - - # Verify new event change uptime - # 1 month has passed - - device["uptime"] = 60 - now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) - with patch("homeassistant.util.dt.now", return_value=now): - mock_websocket_message(message=MessageKey.DEVICE, data=device) - - assert hass.states.get("sensor.device_uptime").state == "2021-02-01T01:00:00+00:00" - - @pytest.mark.parametrize( "device_payload", [ @@ -982,7 +805,7 @@ async def test_device_uptime( async def test_device_temperature( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that temperature sensors are working as expected.""" @@ -1031,7 +854,7 @@ async def test_device_temperature( async def test_device_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that state sensors are working as expected.""" @@ 
-1068,7 +891,7 @@ async def test_device_state( async def test_device_system_stats( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that device stats sensors are working as expected.""" @@ -1163,9 +986,9 @@ async def test_device_system_stats( async def test_bandwidth_port_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that port bandwidth sensors are working as expected.""" @@ -1280,9 +1103,9 @@ async def test_bandwidth_port_sensors( async def test_device_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, - mock_websocket_message, - client_payload, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + client_payload: dict[str, Any], ) -> None: """Verify that WLAN client sensors are working as expected.""" client_payload += [ @@ -1425,3 +1248,410 @@ async def test_sensor_sources( assert state.attributes.get(ATTR_STATE_CLASS) == snapshot assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == snapshot assert state.state == snapshot + + +async def _test_uptime_entity( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, + payload: dict[str, Any], + entity_id: str, + message_key: MessageKey, + initial_uptime: int, + event_uptime: int, + small_variation_uptime: int, + new_uptime: int, +) -> None: + """Verify that uptime entities are working as expected.""" + payload["uptime"] = initial_uptime + freezer.move_to(datetime(2021, 1, 1, 1, 1, 0, tzinfo=dt_util.UTC)) + config_entry = await config_entry_factory() + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify normal new event doesn't change uptime + # 4 minutes have passed + + payload["uptime"] = event_uptime + now = datetime(2021, 1, 1, 1, 4, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify small variation of uptime (<120 seconds) is ignored + # 15 seconds variation after 8 minutes + + payload["uptime"] = small_variation_uptime + now = datetime(2021, 1, 1, 1, 8, 15, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + + assert hass.states.get(entity_id).state == "2021-01-01T01:00:00+00:00" + + # Verify new event change uptime + # 1 month has passed + + payload["uptime"] = new_uptime + now = datetime(2021, 2, 1, 1, 1, 0, tzinfo=dt_util.UTC) + with patch("homeassistant.util.dt.now", return_value=now): + mock_websocket_message(message=message_key, data=payload) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == "2021-02-01T01:00:00+00:00" + + return config_entry + + +@pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_UPTIME_SENSORS: True}]) +@pytest.mark.parametrize( + "client_payload", + [ + [ + { + "mac": "00:00:00:00:00:01", + "name": "client1", + "oui": "Producer", + 
"uptime": 0, + } + ] + ], +) +@pytest.mark.parametrize( + ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"), + [ + # Uptime listed in epoch time should never change + (1609462800, 1609462800, 1609462800, 1612141200), + # Uptime counted in seconds increases with every event + (60, 240, 480, 60), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_client_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + config_entry_options: MappingProxyType[str, Any], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], + initial_uptime, + event_uptime, + small_variation_uptime, + new_uptime, +) -> None: + """Verify that client uptime sensors are working as expected.""" + config_entry = await _test_uptime_entity( + hass, + freezer, + mock_websocket_message, + config_entry_factory, + payload=client_payload[0], + entity_id="sensor.client1_uptime", + message_key=MessageKey.CLIENT, + initial_uptime=initial_uptime, + event_uptime=event_uptime, + small_variation_uptime=small_variation_uptime, + new_uptime=new_uptime, + ) + + assert ( + entity_registry.async_get("sensor.client1_uptime").entity_category + is EntityCategory.DIAGNOSTIC + ) + + # Disable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = False + hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert hass.states.get("sensor.client1_uptime") is None + + # Enable option + options = deepcopy(config_entry_options) + options[CONF_ALLOW_UPTIME_SENSORS] = True + hass.config_entries.async_update_entry(config_entry, options=options) + await hass.async_block_till_done() + + assert hass.states.get("sensor.client1_uptime") + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + } + ] + ], +) +async def test_device_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + device_payload: list[dict[str, Any]], +) -> None: + """Verify that device uptime sensors are working as expected.""" + await _test_uptime_entity( + hass, + freezer, + mock_websocket_message, + config_entry_factory, + payload=device_payload[0], + entity_id="sensor.device_uptime", + message_key=MessageKey.DEVICE, + initial_uptime=60, + event_uptime=240, + small_variation_uptime=480, + new_uptime=60, + ) + + assert ( + entity_registry.async_get("sensor.device_uptime").entity_category + is EntityCategory.DIAGNOSTIC + ) + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "availability": 100.0, + "latency_average": 39, + "monitors": [ + { + "availability": 100.0, + "latency_average": 56, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 
53, + "target": "google.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 30, + "target": "1.1.1.1", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.parametrize( + ("entity_id", "state", "updated_state", "index_to_update"), + [ + # Microsoft + ("microsoft_wan", "56", "20", 0), + # Google + ("google_wan", "53", "90", 1), + # Cloudflare + ("cloudflare_wan", "30", "80", 2), + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message: WebsocketMessageMock, + device_payload: list[dict[str, Any]], + entity_id: str, + state: str, + updated_state: str, + index_to_update: int, +) -> None: + """Verify that wan latency sensors are working as expected.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get(f"sensor.mock_name_{entity_id}_latency") + assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert latency_entry.entity_category is EntityCategory.DIAGNOSTIC + + # Enable entity + entity_registry.async_update_entity( + entity_id=f"sensor.mock_name_{entity_id}_latency", disabled_by=None + ) + + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + # Verify sensor attributes and state + latency_entry = hass.states.get(f"sensor.mock_name_{entity_id}_latency") + assert latency_entry.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DURATION + assert ( + latency_entry.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + ) + assert latency_entry.state == state + + # Verify state update + device = device_payload[0] + device["uptime_stats"]["WAN"]["monitors"][index_to_update]["latency_average"] = ( + updated_state + ) + + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert ( + hass.states.get(f"sensor.mock_name_{entity_id}_latency").state == updated_state + ) + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "monitors": [ + { + "availability": 100.0, + "latency_average": 30, + "target": "1.2.3.4", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_entries( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is 
no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None diff --git a/tests/components/unifi/test_services.py b/tests/components/unifi/test_services.py index e3b03bc868d..a7968a92e22 100644 --- a/tests/components/unifi/test_services.py +++ b/tests/components/unifi/test_services.py @@ -10,11 +10,11 @@ from homeassistant.components.unifi.services import ( SERVICE_RECONNECT_CLIENT, SERVICE_REMOVE_CLIENTS, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -25,7 +25,7 @@ async def test_reconnect_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify call to reconnect client is performed as expected.""" @@ -69,7 +69,7 @@ async def test_reconnect_device_without_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if device does not have a known mac.""" aioclient_mock.clear_requests() @@ -95,7 +95,7 @@ async def test_reconnect_client_hub_unavailable( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if hub is unavailable.""" @@ -127,7 +127,7 @@ async def test_reconnect_client_unknown_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if trying to reconnect a mac unknown to hub.""" aioclient_mock.clear_requests() @@ -152,7 +152,7 @@ async def test_reconnect_wired_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if client is wired.""" @@ -204,7 +204,7 @@ async def test_reconnect_wired_client( async def test_remove_clients( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: 
ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify removing different variations of clients work.""" aioclient_mock.clear_requests() @@ -288,8 +288,8 @@ async def test_services_handle_unloaded_config_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: ConfigEntry, - clients_all_payload, + config_entry_setup: MockConfigEntry, + clients_all_payload: dict[str, Any], ) -> None: """Verify no call is made if config entry is unloaded.""" await hass.config_entries.async_unload(config_entry_setup.entry_id) diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index b0ae8bde445..6d85437a244 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -1,6 +1,5 @@ """UniFi Network switch platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import timedelta from typing import Any @@ -22,7 +21,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_DEVICES, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -37,9 +36,14 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .conftest import CONTROLLER_HOST +from .conftest import ( + CONTROLLER_HOST, + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -774,6 +778,37 @@ PORT_FORWARD_PLEX = { "src": "any", } +TRAFFIC_RULE = { + "_id": "6452cd9b859d5b11aa002ea1", + "action": "BLOCK", + "app_category_ids": [], + "app_ids": [], + "bandwidth_limit": { + "download_limit_kbps": 1024, + "enabled": False, + "upload_limit_kbps": 1024, + }, + "description": "Test Traffic Rule", + "name": "Test Traffic Rule", + "domains": [], + "enabled": True, + "ip_addresses": [], + "ip_ranges": [], + "matching_target": "INTERNET", + "network_ids": [], + "regions": [], + "schedule": { + "date_end": "2023-05-10", + "date_start": "2023-05-03", + "mode": "ALWAYS", + "repeat_on_days": [], + "time_all_day": False, + "time_range_end": "12:00", + "time_range_start": "09:00", + }, + "target_devices": [{"client_mac": CLIENT_1["mac"], "type": "CLIENT"}], +} + @pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @@ -810,12 +845,11 @@ async def test_not_admin(hass: HomeAssistant) -> None: @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) -@pytest.mark.usefixtures("config_entry_setup") async def test_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 @@ -899,7 +933,9 @@ async def test_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) 
@pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_remove_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -936,8 +972,8 @@ async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> N async def test_block_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -996,7 +1032,9 @@ async def test_block_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_dpi_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1022,7 +1060,7 @@ async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") async def test_dpi_switches_add_second_app( - hass: HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1072,6 +1110,62 @@ async def test_dpi_switches_add_second_app( assert hass.states.get("switch.block_media_streaming").state == STATE_ON +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +async def test_traffic_rules( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + config_entry_setup: MockConfigEntry, + traffic_rule_payload: list[dict[str, Any]], +) -> None: + """Test control of UniFi traffic rules.""" + + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + + # Validate state object + switch_1 = hass.states.get("switch.unifi_network_test_traffic_rule") + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + + traffic_rule = deepcopy(traffic_rule_payload[0]) + + # Disable traffic rule + aioclient_mock.put( + f"https://{config_entry_setup.data[CONF_HOST]}:1234" + f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}/trafficrules/{traffic_rule['_id']}", + ) + + call_count = aioclient_mock.call_count + + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_off", + {"entity_id": "switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + # Updating the value for traffic rules will make another call to retrieve the values + assert aioclient_mock.call_count == call_count + 2 + expected_disable_call = deepcopy(traffic_rule) + expected_disable_call["enabled"] = False + + assert aioclient_mock.mock_calls[call_count][2] == expected_disable_call + + call_count = aioclient_mock.call_count + + # Enable traffic rule + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_on", + {"entity_id": 
"switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + + expected_enable_call = deepcopy(traffic_rule) + expected_enable_call["enabled"] = True + + assert aioclient_mock.call_count == call_count + 2 + assert aioclient_mock.mock_calls[call_count][2] == expected_enable_call + + @pytest.mark.parametrize( ("device_payload", "entity_id", "outlet_index", "expected_switches"), [ @@ -1083,8 +1177,8 @@ async def test_dpi_switches_add_second_app( async def test_outlet_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, device_payload: list[dict[str, Any]], entity_id: str, outlet_index: int, @@ -1179,7 +1273,7 @@ async def test_outlet_switches( ) @pytest.mark.usefixtures("config_entry_setup") async def test_new_client_discovered_on_block_control( - hass: HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test if 2nd update has a new client.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 @@ -1197,7 +1291,9 @@ async def test_new_client_discovered_on_block_control( ) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED]]) async def test_option_block_clients( - hass: HomeAssistant, config_entry_setup: ConfigEntry, clients_all_payload + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, + clients_all_payload: list[dict[str, Any]], ) -> None: """Test the changes to option reflects accordingly.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1245,7 +1341,7 @@ async def test_option_block_clients( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_option_remove_switches( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test removal of DPI switch when options updated.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1263,8 +1359,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1362,8 +1458,8 @@ async def test_wlan_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" @@ -1418,8 +1514,8 @@ async def test_port_forwarding_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" @@ -1517,9 +1613,9 @@ async def test_port_forwarding_switches( async def test_updating_unique_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory: Callable[[], ConfigEntry], - config_entry: ConfigEntry, - device_payload, + config_entry_factory: 
ConfigEntryFactoryType, + config_entry: MockConfigEntry, + device_payload: list[dict[str, Any]], ) -> None: """Verify outlet control and poe control unique ID update works.""" entity_registry.async_get_or_create( @@ -1555,7 +1651,9 @@ async def test_updating_unique_id( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" entity_ids = ( "switch.block_client_2", diff --git a/tests/components/unifi/test_update.py b/tests/components/unifi/test_update.py index 3b1de6c4456..7bf4b9aec9d 100644 --- a/tests/components/unifi/test_update.py +++ b/tests/components/unifi/test_update.py @@ -1,9 +1,11 @@ """The tests for the UniFi Network update platform.""" from copy import deepcopy +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion from yarl import URL from homeassistant.components.unifi.const import CONF_SITE_ID @@ -13,23 +15,28 @@ from homeassistant.components.update import ( ATTR_LATEST_VERSION, DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, - UpdateDeviceClass, - UpdateEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker +# Device with new firmware available DEVICE_1 = { "board_rev": 3, "device_id": "mock-id", @@ -46,6 +53,7 @@ DEVICE_1 = { "upgrade_to_firmware": "4.3.17.11279", } +# Device without new firmware available DEVICE_2 = { "board_rev": 3, "device_id": "mock-id", @@ -61,43 +69,40 @@ DEVICE_2 = { @pytest.mark.parametrize("device_payload", [[DEVICE_1, DEVICE_2]]) +@pytest.mark.parametrize( + "site_payload", + [ + [{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}], + [{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}], + ], +) +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.UPDATE]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_device_updates( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some devices.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 2 - - # Device with new firmware available - device_1_state = hass.states.get("update.device_1") assert 
device_1_state.state == STATE_ON - assert device_1_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" - assert device_1_state.attributes[ATTR_LATEST_VERSION] == "4.3.17.11279" assert device_1_state.attributes[ATTR_IN_PROGRESS] is False - assert device_1_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE - assert ( - device_1_state.attributes[ATTR_SUPPORTED_FEATURES] - == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL - ) - - # Device without new firmware available - - device_2_state = hass.states.get("update.device_2") - assert device_2_state.state == STATE_OFF - assert device_2_state.attributes[ATTR_INSTALLED_VERSION] == "4.0.42.10433" - assert device_2_state.attributes[ATTR_LATEST_VERSION] == "4.0.42.10433" - assert device_2_state.attributes[ATTR_IN_PROGRESS] is False - assert device_2_state.attributes[ATTR_DEVICE_CLASS] == UpdateDeviceClass.FIRMWARE - assert ( - device_2_state.attributes[ATTR_SUPPORTED_FEATURES] - == UpdateEntityFeature.PROGRESS | UpdateEntityFeature.INSTALL - ) # Simulate start of update device_1 = deepcopy(DEVICE_1) device_1["state"] = 4 mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_ON @@ -112,7 +117,6 @@ async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> No device_1["upgradable"] = False del device_1["upgrade_to_firmware"] mock_websocket_message(message=MessageKey.DEVICE, data=device_1) - await hass.async_block_till_done() device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_OFF @@ -121,30 +125,13 @@ async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> No assert device_1_state.attributes[ATTR_IN_PROGRESS] is False -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_admin(hass: HomeAssistant) -> None: - """Test that the INSTALL feature is not available on a non-admin account.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 - device_state = hass.states.get("update.device_1") - assert device_state.state == STATE_ON - assert ( - device_state.attributes[ATTR_SUPPORTED_FEATURES] == UpdateEntityFeature.PROGRESS - ) - - @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) async def test_install( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the device update install call.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 device_state = hass.states.get("update.device_1") assert device_state.state == STATE_ON @@ -174,9 +161,10 @@ async def test_install( @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub becoming unavailable.""" - assert len(hass.states.async_entity_ids(UPDATE_DOMAIN)) == 1 assert hass.states.get("update.device_1").state == STATE_ON # Controller unavailable diff --git a/tests/components/unifiprotect/test_event.py b/tests/components/unifiprotect/test_event.py new file mode 
100644 index 00000000000..9d1a701fe39 --- /dev/null +++ b/tests/components/unifiprotect/test_event.py @@ -0,0 +1,154 @@ +"""Test the UniFi Protect event platform.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from unittest.mock import Mock + +from uiprotect.data import Camera, Event, EventType, ModelType, SmartDetectObjectType + +from homeassistant.components.unifiprotect.const import ( + ATTR_EVENT_ID, + DEFAULT_ATTRIBUTION, +) +from homeassistant.components.unifiprotect.event import EVENT_DESCRIPTIONS +from homeassistant.const import ATTR_ATTRIBUTION, Platform +from homeassistant.core import Event as HAEvent, HomeAssistant, callback +from homeassistant.helpers.event import async_track_state_change_event + +from .utils import ( + MockUFPFixture, + adopt_devices, + assert_entity_counts, + ids_from_device_description, + init_entry, + remove_entities, +) + + +async def test_camera_remove( + hass: HomeAssistant, ufp: MockUFPFixture, doorbell: Camera, unadopted_camera: Camera +) -> None: + """Test removing and re-adding a camera device.""" + + ufp.api.bootstrap.nvr.system_info.ustorage = None + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + await remove_entities(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 0, 0) + await adopt_devices(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + + +async def test_doorbell_ring( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test a doorbell ring event.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.EVENT, 1, 1) + events: list[HAEvent] = [] + + @callback + def _capture_event(event: HAEvent) -> None: + events.append(event) + + _, entity_id = ids_from_device_description( + Platform.EVENT, doorbell, EVENT_DESCRIPTIONS[0] + ) + + unsub = async_track_state_change_event(hass, entity_id, _capture_event) + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=None, + score=100, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + new_camera.last_ring_event_id = "test_event_id" + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + assert len(events) == 1 + state = events[0].data["new_state"] + assert state + timestamp = state.state + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_ID] == "test_event_id" + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=50, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + # Event is already seen and has end, should now be off + state = 
hass.states.get(entity_id) + assert state + assert state.state == timestamp + + # Now send an event that has an end right away + event = Event( + model=ModelType.EVENT, + id="new_event_id", + type=EventType.RING, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=80, + smart_detect_types=[SmartDetectObjectType.PACKAGE], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + + ufp.ws_msg(mock_msg) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == timestamp + unsub() diff --git a/tests/components/unifiprotect/test_switch.py b/tests/components/unifiprotect/test_switch.py index 6e5c83ef237..9e0e9efa0ce 100644 --- a/tests/components/unifiprotect/test_switch.py +++ b/tests/components/unifiprotect/test_switch.py @@ -35,15 +35,16 @@ CAMERA_SWITCHES_BASIC = [ for d in CAMERA_SWITCHES if ( not d.name.startswith("Detections:") - and d.name != "SSH enabled" - and d.name != "Color night vision" - and d.name != "Tracking: person" - and d.name != "HDR mode" + and d.name + not in {"SSH enabled", "Color night vision", "Tracking: person", "HDR mode"} ) - or d.name == "Detections: motion" - or d.name == "Detections: person" - or d.name == "Detections: vehicle" - or d.name == "Detections: animal" + or d.name + in { + "Detections: motion", + "Detections: person", + "Detections: vehicle", + "Detections: animal", + } ] CAMERA_SWITCHES_NO_EXTRA = [ d diff --git a/tests/components/unifiprotect/utils.py b/tests/components/unifiprotect/utils.py index 21c01f77c5f..25a9ddcbb92 100644 --- a/tests/components/unifiprotect/utils.py +++ b/tests/components/unifiprotect/utils.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable, Sequence from dataclasses import dataclass from datetime import timedelta -from typing import Any from unittest.mock import Mock from uiprotect import ProtectApiClient @@ -41,11 +40,11 @@ class MockUFPFixture: ws_subscription: Callable[[WSSubscriptionMessage], None] | None = None ws_state_subscription: Callable[[WebsocketState], None] | None = None - def ws_msg(self, msg: WSSubscriptionMessage) -> Any: + def ws_msg(self, msg: WSSubscriptionMessage) -> None: """Emit WS message for testing.""" if self.ws_subscription is not None: - return self.ws_subscription(msg) + self.ws_subscription(msg) def reset_objects(bootstrap: Bootstrap): diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 814fa34a125..187b62a93a1 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -325,10 +325,10 @@ async def test_config_bad_children(hass: HomeAssistant) -> None: config_bad_children = {"name": "test", "children": {}, "platform": "universal"} config_no_children = validate_config(config_no_children) - assert [] == config_no_children["children"] + assert config_no_children["children"] == [] config_bad_children = validate_config(config_bad_children) - assert [] == config_bad_children["children"] + assert config_bad_children["children"] == [] async def test_config_bad_commands(hass: HomeAssistant) -> None: @@ -336,7 +336,7 @@ async def test_config_bad_commands(hass: HomeAssistant) -> None: config = {"name": "test", "platform": 
"universal"} config = validate_config(config) - assert {} == config["commands"] + assert config["commands"] == {} async def test_config_bad_attributes(hass: HomeAssistant) -> None: @@ -344,7 +344,7 @@ async def test_config_bad_attributes(hass: HomeAssistant) -> None: config = {"name": "test", "platform": "universal"} config = validate_config(config) - assert {} == config["attributes"] + assert config["attributes"] == {} async def test_config_bad_key(hass: HomeAssistant) -> None: @@ -1280,6 +1280,7 @@ async def test_master_state_with_template(hass: HomeAssistant) -> None: context = Context() hass.states.async_set("input_boolean.test", STATE_ON, context=context) await hass.async_block_till_done() + await hass.async_block_till_done() assert hass.states.get("media_player.tv").state == STATE_OFF assert events[0].context == context diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index 54aeb00e89a..efa6d60c344 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -1,7 +1,7 @@ """Test the UPB Control config flow.""" from asyncio import TimeoutError -from unittest.mock import AsyncMock, PropertyMock, patch +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN @@ -18,6 +18,7 @@ def mocked_upb(sync_complete=True, config_ok=True): upb_mock = AsyncMock() type(upb_mock).network_id = PropertyMock(return_value="42") type(upb_mock).config_ok = PropertyMock(return_value=config_ok) + type(upb_mock).disconnect = MagicMock() if sync_complete: upb_mock.async_connect.side_effect = _upb_lib_connect return patch( diff --git a/tests/components/update/test_device_trigger.py b/tests/components/update/test_device_trigger.py index fa9af863f56..202b3d32509 100644 --- a/tests/components/update/test_device_trigger.py +++ b/tests/components/update/test_device_trigger.py @@ -21,7 +21,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, setup_test_component_platform, ) @@ -31,12 +30,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -182,7 +175,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -253,21 +246,21 @@ async def test_if_fires_on_state_change( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) hass.states.async_set("update.update_available", STATE_ON) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 
assert ( - calls[1].data["some"] + service_calls[1].data["some"] == "update_available device - update.update_available - off - on - None" ) @@ -276,7 +269,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for turn_on and turn_off triggers firing.""" @@ -326,13 +319,13 @@ async def test_if_fires_on_state_change_legacy( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "no_update device - update.update_available - on - off - None" ) @@ -341,7 +334,7 @@ async def test_if_fires_on_state_change_with_for( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], mock_update_entities: list[MockUpdateEntity], ) -> None: """Test for triggers firing with delay.""" @@ -392,16 +385,16 @@ async def test_if_fires_on_state_change_with_for( state = hass.states.get("update.update_available") assert state assert state.state == STATE_ON - assert not calls + assert not service_calls hass.states.async_set("update.update_available", STATE_OFF) await hass.async_block_till_done() - assert not calls + assert not service_calls async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "turn_off device - update.update_available - on - off - 0:00:05" ) diff --git a/tests/components/update/test_init.py b/tests/components/update/test_init.py index b37abc2263a..7860c679f37 100644 --- a/tests/components/update/test_init.py +++ b/tests/components/update/test_init.py @@ -1,9 +1,9 @@ """The tests for the Update component.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.update import ( ATTR_BACKUP, diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 0bfcd062ac0..1431ce2c9ef 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -2,11 +2,14 @@ from __future__ import annotations +from collections.abc import Generator import copy from datetime import datetime +import socket from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse +from async_upnp_client.aiohttp import AiohttpNotifyServer from async_upnp_client.client import UpnpDevice from async_upnp_client.profiles.igd import IgdDevice, IgdState import pytest @@ -98,9 +101,24 @@ def mock_igd_device(mock_async_create_device) -> IgdDevice: port_mapping_number_of_entries=0, ) - with patch( - "homeassistant.components.upnp.device.IgdDevice.__new__", - return_value=mock_igd_device, + mock_igd_device.async_subscribe_services = AsyncMock() + + mock_notify_server = create_autospec(AiohttpNotifyServer) + mock_notify_server.event_handler = MagicMock() + + with ( + patch( + 
"homeassistant.components.upnp.device.async_get_local_ip", + return_value=(socket.AF_INET, "127.0.0.1"), + ), + patch( + "homeassistant.components.upnp.device.IgdDevice.__new__", + return_value=mock_igd_device, + ), + patch( + "homeassistant.components.upnp.device.AiohttpNotifyServer.__new__", + return_value=mock_notify_server, + ), ): yield mock_igd_device @@ -136,7 +154,7 @@ def mock_setup_entry(): @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), @@ -230,7 +248,7 @@ async def mock_config_entry( ssdp_instant_discovery, mock_igd_device: IgdDevice, mock_mac_address_from_host, -): +) -> MockConfigEntry: """Create an initialized integration.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/upnp/test_config_flow.py b/tests/components/upnp/test_config_flow.py index b8a08d3f592..8799e0faab3 100644 --- a/tests/components/upnp/test_config_flow.py +++ b/tests/components/upnp/test_config_flow.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( + CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_HOST, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, @@ -473,3 +474,28 @@ async def test_flow_ssdp_with_mismatched_udn(hass: HomeAssistant) -> None: CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, CONFIG_ENTRY_HOST: TEST_HOST, } + + +async def test_options_flow( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that the options flow works.""" + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + user_input = { + CONFIG_ENTRY_FORCE_POLL: True, + } + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input, + ) + await hass.async_block_till_done() + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONFIG_ENTRY_FORCE_POLL: True, + } + assert mock_config_entry.options == { + CONFIG_ENTRY_FORCE_POLL: True, + } diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index 4b5e375f8e0..f87696b0bd1 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -5,10 +5,12 @@ from __future__ import annotations import copy from unittest.mock import AsyncMock, MagicMock, patch +from async_upnp_client.profiles.igd import IgdDevice import pytest from homeassistant.components import ssdp from homeassistant.components.upnp.const import ( + CONFIG_ENTRY_FORCE_POLL, CONFIG_ENTRY_LOCATION, CONFIG_ENTRY_MAC_ADDRESS, CONFIG_ENTRY_ORIGINAL_UDN, @@ -31,7 +33,9 @@ from tests.common import MockConfigEntry @pytest.mark.usefixtures("ssdp_instant_discovery", "mock_mac_address_from_host") -async def test_async_setup_entry_default(hass: HomeAssistant) -> None: +async def test_async_setup_entry_default( + hass: HomeAssistant, mock_igd_device: IgdDevice +) -> None: """Test async_setup_entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -43,12 +47,17 @@ async def test_async_setup_entry_default(hass: HomeAssistant) -> None: CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. 
entry.add_to_hass(hass) assert await hass.config_entries.async_setup(entry.entry_id) is True + mock_igd_device.async_subscribe_services.assert_called() + @pytest.mark.usefixtures("ssdp_instant_discovery", "mock_no_mac_address_from_host") async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> None: @@ -63,6 +72,9 @@ async def test_async_setup_entry_default_no_mac_address(hass: HomeAssistant) -> CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: None, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. @@ -91,6 +103,9 @@ async def test_async_setup_entry_multi_location( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Load config_entry. @@ -119,6 +134,9 @@ async def test_async_setup_udn_mismatch( CONFIG_ENTRY_LOCATION: TEST_LOCATION, CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, }, + options={ + CONFIG_ENTRY_FORCE_POLL: False, + }, ) # Set up device discovery callback. @@ -143,3 +161,34 @@ async def test_async_setup_udn_mismatch( # Ensure that the IPv4 location is used. mock_async_create_device.assert_called_once_with(TEST_LOCATION) + + +@pytest.mark.usefixtures( + "ssdp_instant_discovery", + "mock_get_source_ip", + "mock_mac_address_from_host", +) +async def test_async_setup_entry_force_poll( + hass: HomeAssistant, mock_igd_device: IgdDevice +) -> None: + """Test async_setup_entry.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USN, + data={ + CONFIG_ENTRY_ST: TEST_ST, + CONFIG_ENTRY_UDN: TEST_UDN, + CONFIG_ENTRY_ORIGINAL_UDN: TEST_UDN, + CONFIG_ENTRY_LOCATION: TEST_LOCATION, + CONFIG_ENTRY_MAC_ADDRESS: TEST_MAC_ADDRESS, + }, + options={ + CONFIG_ENTRY_FORCE_POLL: True, + }, + ) + + # Load config_entry. 
+ entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) is True + + mock_igd_device.async_subscribe_services.assert_not_called() diff --git a/tests/components/uptime/conftest.py b/tests/components/uptime/conftest.py index 2fe96b91b63..008172dc35a 100644 --- a/tests/components/uptime/conftest.py +++ b/tests/components/uptime/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import patch import pytest -from typing_extensions import Generator from homeassistant.components.uptime.const import DOMAIN from homeassistant.core import HomeAssistant diff --git a/tests/components/uptime/snapshots/test_sensor.ambr b/tests/components/uptime/snapshots/test_sensor.ambr index 92baf939eb3..fa0cb6bf8a9 100644 --- a/tests/components/uptime/snapshots/test_sensor.ambr +++ b/tests/components/uptime/snapshots/test_sensor.ambr @@ -61,6 +61,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'Uptime', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/utility_meter/test_diagnostics.py b/tests/components/utility_meter/test_diagnostics.py index cefd17fc7e4..9ecabe813b1 100644 --- a/tests/components/utility_meter/test_diagnostics.py +++ b/tests/components/utility_meter/test_diagnostics.py @@ -4,6 +4,7 @@ from aiohttp.test_utils import TestClient from freezegun import freeze_time import pytest from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.auth.models import Credentials from homeassistant.components.utility_meter.const import DOMAIN @@ -45,11 +46,6 @@ def _get_test_client_generator( return auth_client -def limit_diagnostic_attrs(prop, path) -> bool: - """Mark attributes to exclude from diagnostic snapshot.""" - return prop in {"entry_id"} - - @freeze_time("2024-04-06 00:00:00+00:00") @pytest.mark.usefixtures("socket_enabled") async def test_diagnostics( @@ -125,4 +121,4 @@ async def test_diagnostics( hass, _get_test_client_generator(hass, aiohttp_client, new_token), config_entry ) - assert diag == snapshot(exclude=limit_diagnostic_attrs) + assert diag == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/v2c/conftest.py b/tests/components/v2c/conftest.py index 1803298be28..5c7db8bbab3 100644 --- a/tests/components/v2c/conftest.py +++ b/tests/components/v2c/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the V2C tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pytrydan.models.trydan import TrydanData -from typing_extensions import Generator from homeassistant.components.v2c.const import DOMAIN from homeassistant.const import CONF_HOST diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index a4f6cad4cc8..cc34cae87f8 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -18,7 +18,7 @@ 'unique_id': 'ABC123', 'version': 1, }), - 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7')", + 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, 
voltage_installation=None, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7', SSID=None, IP=None, signal_status=None)", 'host_status': 200, 'raw_data': '{"ID":"ABC123","ChargeState":2,"ReadyState":0,"ChargePower":1500.27,"ChargeEnergy":1.8,"SlaveError":4,"ChargeTime":4355,"HousePower":0.0,"FVPower":0.0,"BatteryPower":0.0,"Paused":0,"Locked":0,"Timer":0,"Intensity":6,"Dynamic":0,"MinIntensity":6,"MaxIntensity":16,"PauseDynamic":0,"FirmwareVersion":"2.1.7","DynamicPowerMode":2,"ContractedPower":4600}', }) diff --git a/tests/components/v2c/snapshots/test_sensor.ambr b/tests/components/v2c/snapshots/test_sensor.ambr index cc8077333cb..7b9ae4a9ff3 100644 --- a/tests/components/v2c/snapshots/test_sensor.ambr +++ b/tests/components/v2c/snapshots/test_sensor.ambr @@ -126,7 +126,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ev-station', + 'original_icon': None, 'original_name': 'Charge power', 'platform': 'v2c', 'previous_unique_id': None, @@ -141,7 +141,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'EVSE 1.1.1.1 Charge power', - 'icon': 'mdi:ev-station', 'state_class': , 'unit_of_measurement': , }), @@ -255,6 +254,103 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installation voltage', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_installation', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_voltage_installation', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'EVSE 1.1.1.1 Installation voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ip_address', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ip_address', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_sensor[sensor.evse_1_1_1_1_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 IP address', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensor[sensor.evse_1_1_1_1_meter_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -304,7 +400,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.evse_1_1_1_1_meter_error', 'has_entity_name': True, 'hidden_by': None, @@ -428,3 +524,98 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.evse_1_1_1_1_signal_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal status', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_status', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_signal_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_signal_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 Signal status', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SSID', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ssid', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 SSID', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/v2c/test_diagnostics.py b/tests/components/v2c/test_diagnostics.py index 770b00e988b..eafbd68e6fc 100644 --- a/tests/components/v2c/test_diagnostics.py +++ b/tests/components/v2c/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion +from syrupy.filters import props from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -24,7 +25,6 @@ async def test_entry_diagnostics( await init_integration(hass, mock_config_entry) - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, 
mock_config_entry) - == snapshot() - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/v2c/test_sensor.py b/tests/components/v2c/test_sensor.py index 9e7e3800767..430f91647dd 100644 --- a/tests/components/v2c/test_sensor.py +++ b/tests/components/v2c/test_sensor.py @@ -28,7 +28,7 @@ async def test_sensor( await init_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) - assert [ + assert _METER_ERROR_OPTIONS == [ "no_error", "communication", "reading", @@ -64,4 +64,4 @@ async def test_sensor( "tcp_head_mismatch", "empty_message", "undefined_error", - ] == _METER_ERROR_OPTIONS + ] diff --git a/tests/components/vacuum/conftest.py b/tests/components/vacuum/conftest.py index 5167c868f9f..d298260c575 100644 --- a/tests/components/vacuum/conftest.py +++ b/tests/components/vacuum/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Vacuum platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/vacuum/test_device_condition.py b/tests/components/vacuum/test_device_condition.py index 5cc222a1833..9a2a67f7141 100644 --- a/tests/components/vacuum/test_device_condition.py +++ b/tests/components/vacuum/test_device_condition.py @@ -17,11 +17,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryHider from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -29,12 +25,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_conditions( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -119,7 +109,7 @@ async def test_if_state( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -181,30 +171,30 @@ async def test_if_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_docked - event - test_event2" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_docked - event - test_event2" hass.states.async_set(entry.entity_id, STATE_CLEANING) hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "is_cleaning - event - test_event1" # Returning means it's still cleaning hass.states.async_set(entry.entity_id, STATE_RETURNING) hass.bus.async_fire("test_event1") 
hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "is_cleaning - event - test_event1" async def test_if_state_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off conditions.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,5 +237,5 @@ async def test_if_state_legacy( ) hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "is_cleaning - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "is_cleaning - event - test_event1" diff --git a/tests/components/vacuum/test_device_trigger.py b/tests/components/vacuum/test_device_trigger.py index 56e351a6446..c186bd4d9eb 100644 --- a/tests/components/vacuum/test_device_trigger.py +++ b/tests/components/vacuum/test_device_trigger.py @@ -20,7 +20,6 @@ from tests.common import ( async_fire_time_changed, async_get_device_automation_capabilities, async_get_device_automations, - async_mock_service, ) @@ -29,12 +28,6 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def test_get_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, @@ -182,7 +175,7 @@ async def test_if_fires_on_state_change( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -247,18 +240,18 @@ async def test_if_fires_on_state_change( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) # Fake that the entity is docked hass.states.async_set(entry.entity_id, STATE_DOCKED) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"docked - device - {entry.entity_id} - cleaning - docked" ) @@ -267,7 +260,7 @@ async def test_if_fires_on_state_change_legacy( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for turn_on and turn_off triggers firing.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -313,9 +306,9 @@ async def test_if_fires_on_state_change_legacy( # Fake that the entity is cleaning hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"cleaning - device - {entry.entity_id} - docked - cleaning" ) @@ -324,7 +317,7 @@ async def test_if_fires_on_state_change_with_for( 
hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for triggers firing with delay.""" config_entry = MockConfigEntry(domain="test", data={}) @@ -370,16 +363,16 @@ async def test_if_fires_on_state_change_with_for( }, ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 hass.states.async_set(entry.entity_id, STATE_CLEANING) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 await hass.async_block_till_done() assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"turn_off device - {entry.entity_id} - docked - cleaning - 0:00:05" ) diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index 3ef3b1ff4b0..e4519bcef08 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -1,8 +1,9 @@ """The tests for Valve.""" +from collections.abc import Generator + import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant.components.valve import ( DOMAIN, diff --git a/tests/components/velbus/conftest.py b/tests/components/velbus/conftest.py index 3d59ad615c6..402acb821be 100644 --- a/tests/components/velbus/conftest.py +++ b/tests/components/velbus/conftest.py @@ -1,9 +1,9 @@ """Fixtures for the Velbus tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.velbus.const import DOMAIN from homeassistant.config_entries import ConfigEntry diff --git a/tests/components/velbus/test_config_flow.py b/tests/components/velbus/test_config_flow.py index 59effcae706..432fcea10db 100644 --- a/tests/components/velbus/test_config_flow.py +++ b/tests/components/velbus/test_config_flow.py @@ -1,10 +1,10 @@ """Tests for the Velbus config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest import serial.tools.list_ports -from typing_extensions import Generator from velbusaio.exceptions import VelbusConnectionFailed from homeassistant.components import usb diff --git a/tests/components/velux/conftest.py b/tests/components/velux/conftest.py index 692216827b2..512b2a007ed 100644 --- a/tests/components/velux/conftest.py +++ b/tests/components/velux/conftest.py @@ -1,9 +1,9 @@ """Configuration for Velux tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/verisure/conftest.py b/tests/components/verisure/conftest.py index 03086ac2ead..5aafcda2bb3 100644 --- a/tests/components/verisure/conftest.py +++ b/tests/components/verisure/conftest.py @@ -2,10 +2,10 @@ from __future__ import annotations +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.verisure.const import CONF_GIID, DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD diff --git a/tests/components/vesync/snapshots/test_diagnostics.ambr b/tests/components/vesync/snapshots/test_diagnostics.ambr index 
fcb2cc7b286..54ed8acf2d7 100644 --- a/tests/components/vesync/snapshots/test_diagnostics.ambr +++ b/tests/components/vesync/snapshots/test_diagnostics.ambr @@ -38,13 +38,7 @@ 'setDisplay', 'setLevel', ]), - 'cid': 'abcdefghabcdefghabcdefghabcdefgh', - 'config': dict({ - 'auto_target_humidity': 60, - 'automatic_stop': True, - 'display': True, - }), - 'config_dict': dict({ + '_config_dict': dict({ 'features': list([ 'warm_mist', 'nightlight', @@ -71,6 +65,7 @@ 'LUH-A602S-WEUR', 'LUH-A602S-WEU', 'LUH-A602S-WJP', + 'LUH-A602S-WUSC', ]), 'module': 'VeSyncHumid200300S', 'warm_mist_levels': list([ @@ -80,6 +75,16 @@ 3, ]), }), + '_features': list([ + 'warm_mist', + 'nightlight', + ]), + 'cid': 'abcdefghabcdefghabcdefghabcdefgh', + 'config': dict({ + 'auto_target_humidity': 60, + 'automatic_stop': True, + 'display': True, + }), 'config_module': 'WFON_AHM_LUH-A602S-WUS_US', 'connection_status': 'online', 'connection_type': 'WiFi+BTOnboarding+BTNotify', @@ -105,10 +110,6 @@ 'device_type': 'LUH-A602S-WUS', 'enabled': False, 'extension': None, - 'features': list([ - 'warm_mist', - 'nightlight', - ]), 'mac_id': '**REDACTED**', 'manager': '**REDACTED**', 'mist_levels': list([ @@ -203,7 +204,7 @@ 'auto', 'sleep', ]), - 'supported_features': 9, + 'supported_features': 57, }), 'entity_id': 'fan.fan', 'last_changed': str, diff --git a/tests/components/vesync/snapshots/test_fan.ambr b/tests/components/vesync/snapshots/test_fan.ambr index a9210447f1e..21985afd7bf 100644 --- a/tests/components/vesync/snapshots/test_fan.ambr +++ b/tests/components/vesync/snapshots/test_fan.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -65,7 +66,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'air-purifier', 'unit_of_measurement': None, @@ -80,7 +81,7 @@ 'auto', 'sleep', ]), - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_131s', @@ -113,6 +114,7 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -155,7 +157,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': 'asd_sdfKIHG7IJHGwJGJ7GJ_ag5h3G55', 'unit_of_measurement': None, @@ -176,7 +178,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_200s', @@ -209,6 +211,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -252,7 +255,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '400s-purifier', 'unit_of_measurement': None, @@ -274,7 +277,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_400s', @@ -307,6 +310,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -350,7 +354,7 @@ 'original_name': None, 'platform': 'vesync', 'previous_unique_id': None, - 
'supported_features': , + 'supported_features': , 'translation_key': None, 'unique_id': '600s-purifier', 'unit_of_measurement': None, @@ -372,7 +376,7 @@ 'sleep', ]), 'screen_status': True, - 'supported_features': , + 'supported_features': , }), 'context': , 'entity_id': 'fan.air_purifier_600s', @@ -405,6 +409,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -442,6 +447,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -495,6 +501,7 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -532,6 +539,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -569,6 +577,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_light.ambr b/tests/components/vesync/snapshots/test_light.ambr index c2c9854fa9f..36694ae3ef6 100644 --- a/tests/components/vesync/snapshots/test_light.ambr +++ b/tests/components/vesync/snapshots/test_light.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -59,6 +60,7 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -96,6 +98,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -133,6 +136,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -170,6 +174,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -259,6 +264,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -366,6 +372,7 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -403,6 +410,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -507,6 +515,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_sensor.ambr b/tests/components/vesync/snapshots/test_sensor.ambr index 97013b4e9ce..11d931e023a 100644 --- a/tests/components/vesync/snapshots/test_sensor.ambr +++ b/tests/components/vesync/snapshots/test_sensor.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -151,6 +152,7 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -236,6 +238,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', 
+ 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -414,6 +417,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -592,6 +596,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -629,6 +634,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -682,6 +688,7 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -1013,6 +1020,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -1050,6 +1058,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vesync/snapshots/test_switch.ambr b/tests/components/vesync/snapshots/test_switch.ambr index 86b3b0ff5cd..4b271ee55d9 100644 --- a/tests/components/vesync/snapshots/test_switch.ambr +++ b/tests/components/vesync/snapshots/test_switch.ambr @@ -22,6 +22,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LV-PUR131S', + 'model_id': None, 'name': 'Air Purifier 131s', 'name_by_user': None, 'primary_config_entry': , @@ -59,6 +60,7 @@ }), 'manufacturer': 'VeSync', 'model': 'Core200S', + 'model_id': None, 'name': 'Air Purifier 200s', 'name_by_user': None, 'primary_config_entry': , @@ -96,6 +98,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C401S-WJP', + 'model_id': None, 'name': 'Air Purifier 400s', 'name_by_user': None, 'primary_config_entry': , @@ -133,6 +136,7 @@ }), 'manufacturer': 'VeSync', 'model': 'LAP-C601S-WUS', + 'model_id': None, 'name': 'Air Purifier 600s', 'name_by_user': None, 'primary_config_entry': , @@ -170,6 +174,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100', + 'model_id': None, 'name': 'Dimmable Light', 'name_by_user': None, 'primary_config_entry': , @@ -207,6 +212,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWD16', + 'model_id': None, 'name': 'Dimmer Switch', 'name_by_user': None, 'primary_config_entry': , @@ -260,6 +266,7 @@ }), 'manufacturer': 'VeSync', 'model': 'wifi-switch-1.3', + 'model_id': None, 'name': 'Outlet', 'name_by_user': None, 'primary_config_entry': , @@ -341,6 +348,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESL100CW', + 'model_id': None, 'name': 'Temperature Light', 'name_by_user': None, 'primary_config_entry': , @@ -378,6 +386,7 @@ }), 'manufacturer': 'VeSync', 'model': 'ESWL01', + 'model_id': None, 'name': 'Wall Switch', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index 6899839a0e1..372314d9fe2 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -2,13 +2,13 @@ from __future__ import annotations +from collections.abc import AsyncGenerator, Generator from dataclasses import dataclass from unittest.mock import AsyncMock, Mock, patch import pytest from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareService import ViCareDeviceAccessor, readFeature -from typing_extensions import AsyncGenerator, Generator from homeassistant.components.vicare.const import DOMAIN from homeassistant.core import 
HomeAssistant diff --git a/tests/components/vicare/test_diagnostics.py b/tests/components/vicare/test_diagnostics.py index 815b39545a9..6adf4fe0edc 100644 --- a/tests/components/vicare/test_diagnostics.py +++ b/tests/components/vicare/test_diagnostics.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import HomeAssistant @@ -21,4 +22,4 @@ async def test_diagnostics( hass, hass_client, mock_vicare_gas_boiler ) - assert diag == snapshot + assert diag == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py new file mode 100644 index 00000000000..575e549f0d9 --- /dev/null +++ b/tests/components/vicare/test_types.py @@ -0,0 +1,87 @@ +"""Test ViCare types.""" + +import pytest + +from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP +from homeassistant.components.vicare.types import HeatingProgram, VentilationMode + + +@pytest.mark.parametrize( + ("vicare_program", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (HeatingProgram.COMFORT, PRESET_COMFORT), + (HeatingProgram.COMFORT_HEATING, PRESET_COMFORT), + ], +) +async def test_heating_program_to_ha_preset( + vicare_program: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare HeatingProgram to HA Preset.""" + + assert HeatingProgram.to_ha_preset(vicare_program) == expected_result + + +@pytest.mark.parametrize( + ("ha_preset", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (PRESET_SLEEP, HeatingProgram.REDUCED), + ], +) +async def test_ha_preset_to_heating_program( + ha_preset: str | None, + expected_result: str | None, +) -> None: + """Testing HA Preset to ViCare HeatingProgram.""" + + supported_programs = [ + HeatingProgram.COMFORT, + HeatingProgram.ECO, + HeatingProgram.NORMAL, + HeatingProgram.REDUCED, + ] + assert ( + HeatingProgram.from_ha_preset(ha_preset, supported_programs) == expected_result + ) + + +@pytest.mark.parametrize( + ("vicare_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + ("sensorOverride", VentilationMode.SENSOR_OVERRIDE), + ], +) +async def test_ventilation_mode_to_ha_mode( + vicare_mode: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare mode to VentilationMode.""" + + assert VentilationMode.from_vicare_mode(vicare_mode) == expected_result + + +@pytest.mark.parametrize( + ("ha_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (VentilationMode.SENSOR_OVERRIDE, "sensorOverride"), + ], +) +async def test_ha_mode_to_ventilation_mode( + ha_mode: str | None, + expected_result: str | None, +) -> None: + """Testing VentilationMode to ViCare mode.""" + + assert VentilationMode.to_vicare_mode(ha_mode) == expected_result diff --git a/tests/components/vilfo/conftest.py b/tests/components/vilfo/conftest.py index 11b620b82e0..fbc48da28b3 100644 --- a/tests/components/vilfo/conftest.py +++ b/tests/components/vilfo/conftest.py @@ -1,9 +1,9 @@ """Vilfo tests conftest.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.vilfo import DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index b06ce2e1eb7..f33c7839c72 100644 ---
a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -1,5 +1,6 @@ """Configure py.test.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest @@ -35,13 +36,13 @@ class MockInput: self.name = name -def get_mock_inputs(input_list): +def get_mock_inputs(input_list) -> list[MockInput]: """Return list of MockInput.""" return [MockInput(device_input) for device_input in input_list] @pytest.fixture(name="vizio_get_unique_id", autouse=True) -def vizio_get_unique_id_fixture(): +def vizio_get_unique_id_fixture() -> Generator[None]: """Mock get vizio unique ID.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -51,7 +52,7 @@ def vizio_get_unique_id_fixture(): @pytest.fixture(name="vizio_data_coordinator_update", autouse=True) -def vizio_data_coordinator_update_fixture(): +def vizio_data_coordinator_update_fixture() -> Generator[None]: """Mock get data coordinator update.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -61,7 +62,7 @@ def vizio_data_coordinator_update_fixture(): @pytest.fixture(name="vizio_data_coordinator_update_failure") -def vizio_data_coordinator_update_failure_fixture(): +def vizio_data_coordinator_update_failure_fixture() -> Generator[None]: """Mock get data coordinator update failure.""" with patch( "homeassistant.components.vizio.coordinator.gen_apps_list_from_url", @@ -71,7 +72,7 @@ def vizio_data_coordinator_update_failure_fixture(): @pytest.fixture(name="vizio_no_unique_id") -def vizio_no_unique_id_fixture(): +def vizio_no_unique_id_fixture() -> Generator[None]: """Mock no vizio unique ID returrned.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.get_unique_id", @@ -81,7 +82,7 @@ def vizio_no_unique_id_fixture(): @pytest.fixture(name="vizio_connect") -def vizio_connect_fixture(): +def vizio_connect_fixture() -> Generator[None]: """Mock valid vizio device and entry setup.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.validate_ha_config", @@ -91,7 +92,7 @@ def vizio_connect_fixture(): @pytest.fixture(name="vizio_complete_pairing") -def vizio_complete_pairing_fixture(): +def vizio_complete_pairing_fixture() -> Generator[None]: """Mock complete vizio pairing workflow.""" with ( patch( @@ -107,7 +108,7 @@ def vizio_complete_pairing_fixture(): @pytest.fixture(name="vizio_start_pairing_failure") -def vizio_start_pairing_failure_fixture(): +def vizio_start_pairing_failure_fixture() -> Generator[None]: """Mock vizio start pairing failure.""" with patch( "homeassistant.components.vizio.config_flow.VizioAsync.start_pair", @@ -117,7 +118,7 @@ def vizio_start_pairing_failure_fixture(): @pytest.fixture(name="vizio_invalid_pin_failure") -def vizio_invalid_pin_failure_fixture(): +def vizio_invalid_pin_failure_fixture() -> Generator[None]: """Mock vizio failure due to invalid pin.""" with ( patch( @@ -133,14 +134,14 @@ def vizio_invalid_pin_failure_fixture(): @pytest.fixture(name="vizio_bypass_setup") -def vizio_bypass_setup_fixture(): +def vizio_bypass_setup_fixture() -> Generator[None]: """Mock component setup.""" with patch("homeassistant.components.vizio.async_setup_entry", return_value=True): yield @pytest.fixture(name="vizio_bypass_update") -def vizio_bypass_update_fixture(): +def vizio_bypass_update_fixture() -> Generator[None]: """Mock component update.""" with ( patch( @@ -153,7 +154,7 @@ def vizio_bypass_update_fixture(): @pytest.fixture(name="vizio_guess_device_type") -def 
vizio_guess_device_type_fixture(): +def vizio_guess_device_type_fixture() -> Generator[None]: """Mock vizio async_guess_device_type function.""" with patch( "homeassistant.components.vizio.config_flow.async_guess_device_type", @@ -163,7 +164,7 @@ def vizio_guess_device_type_fixture(): @pytest.fixture(name="vizio_cant_connect") -def vizio_cant_connect_fixture(): +def vizio_cant_connect_fixture() -> Generator[None]: """Mock vizio device can't connect with valid auth.""" with ( patch( @@ -179,7 +180,7 @@ def vizio_cant_connect_fixture(): @pytest.fixture(name="vizio_update") -def vizio_update_fixture(): +def vizio_update_fixture() -> Generator[None]: """Mock valid updates to vizio device.""" with ( patch( @@ -223,7 +224,7 @@ def vizio_update_fixture(): @pytest.fixture(name="vizio_update_with_apps") -def vizio_update_with_apps_fixture(vizio_update: pytest.fixture): +def vizio_update_with_apps_fixture(vizio_update: None) -> Generator[None]: """Mock valid updates to vizio device that supports apps.""" with ( patch( @@ -243,7 +244,7 @@ def vizio_update_with_apps_fixture(vizio_update: pytest.fixture): @pytest.fixture(name="vizio_update_with_apps_on_input") -def vizio_update_with_apps_on_input_fixture(vizio_update: pytest.fixture): +def vizio_update_with_apps_on_input_fixture(vizio_update: None) -> Generator[None]: """Mock valid updates to vizio device that supports apps but is on a TV input.""" with ( patch( @@ -263,7 +264,7 @@ def vizio_update_with_apps_on_input_fixture(vizio_update: pytest.fixture): @pytest.fixture(name="vizio_hostname_check") -def vizio_hostname_check(): +def vizio_hostname_check() -> Generator[None]: """Mock vizio hostname resolution.""" with patch( "homeassistant.components.vizio.config_flow.socket.gethostbyname", diff --git a/tests/components/vizio/test_config_flow.py b/tests/components/vizio/test_config_flow.py index 712dd2a31b5..42d4394ca80 100644 --- a/tests/components/vizio/test_config_flow.py +++ b/tests/components/vizio/test_config_flow.py @@ -57,11 +57,8 @@ from .const import ( from tests.common import MockConfigEntry -async def test_user_flow_minimum_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_flow_minimum_fields(hass: HomeAssistant) -> None: """Test user config flow with minimum fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -81,11 +78,8 @@ async def test_user_flow_minimum_fields( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -async def test_user_flow_all_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_flow_all_fields(hass: HomeAssistant) -> None: """Test user config flow with all fields.""" # test form shows result = await hass.config_entries.flow.async_init( @@ -108,11 +102,8 @@ async def test_user_flow_all_fields( assert CONF_APPS not in result["data"] -async def test_speaker_options_flow( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_speaker_options_flow(hass: HomeAssistant) -> None: """Test options config flow for speaker.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_SPEAKER_CONFIG @@ -136,11 
+127,8 @@ async def test_speaker_options_flow( assert CONF_APPS not in result["data"] -async def test_tv_options_flow_no_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_no_apps(hass: HomeAssistant) -> None: """Test options config flow for TV without providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -167,11 +155,8 @@ async def test_tv_options_flow_no_apps( assert CONF_APPS not in result["data"] -async def test_tv_options_flow_with_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_with_apps(hass: HomeAssistant) -> None: """Test options config flow for TV with providing apps option.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -199,11 +184,8 @@ async def test_tv_options_flow_with_apps( assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_tv_options_flow_start_with_volume( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_tv_options_flow_start_with_volume(hass: HomeAssistant) -> None: """Test options config flow for TV with providing apps option after providing volume step in initial config.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -241,11 +223,8 @@ async def test_tv_options_flow_start_with_volume( assert result["data"][CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_user_host_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_host_already_configured(hass: HomeAssistant) -> None: """Test host is already configured during user setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -265,11 +244,8 @@ async def test_user_host_already_configured( assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -async def test_user_serial_number_already_exists( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_serial_number_already_exists(hass: HomeAssistant) -> None: """Test serial_number is already configured with different host and name during user setup.""" # Set up new entry MockConfigEntry( @@ -289,9 +265,8 @@ async def test_user_serial_number_already_exists( assert result["errors"] == {CONF_HOST: "existing_config_entry_found"} -async def test_user_error_on_could_not_connect( - hass: HomeAssistant, vizio_no_unique_id: pytest.fixture -) -> None: +@pytest.mark.usefixtures("vizio_no_unique_id") +async def test_user_error_on_could_not_connect(hass: HomeAssistant) -> None: """Test with could_not_connect during user setup due to no connectivity.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_USER_VALID_TV_CONFIG @@ -301,8 +276,9 @@ async def 
test_user_error_on_could_not_connect( assert result["errors"] == {CONF_HOST: "cannot_connect"} +@pytest.mark.usefixtures("vizio_cant_connect") async def test_user_error_on_could_not_connect_invalid_token( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture + hass: HomeAssistant, ) -> None: """Test with could_not_connect during user setup due to invalid token.""" result = await hass.config_entries.flow.async_init( @@ -313,12 +289,10 @@ async def test_user_error_on_could_not_connect_invalid_token( assert result["errors"] == {"base": "cannot_connect"} -async def test_user_tv_pairing_no_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_user_tv_pairing_no_apps(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv during user entry and no apps configured.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -344,12 +318,10 @@ async def test_user_tv_pairing_no_apps( assert CONF_APPS not in result["data"] -async def test_user_start_pairing_failure( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_start_pairing_failure: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_start_pairing_failure" +) +async def test_user_start_pairing_failure(hass: HomeAssistant) -> None: """Test failure to start pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -360,12 +332,10 @@ async def test_user_start_pairing_failure( assert result["errors"] == {"base": "cannot_connect"} -async def test_user_invalid_pin( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_invalid_pin_failure: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_invalid_pin_failure" +) +async def test_user_invalid_pin(hass: HomeAssistant) -> None: """Test failure to complete pairing from user config flow.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -383,11 +353,8 @@ async def test_user_invalid_pin( assert result["errors"] == {CONF_PIN: "complete_pairing_failed"} -async def test_user_ignore( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_user_ignore(hass: HomeAssistant) -> None: """Test user config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -403,11 +370,8 @@ async def test_user_ignore( assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_import_flow_minimum_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_flow_minimum_fields(hass: HomeAssistant) -> None: """Test import config flow with minimum fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -425,11 +389,8 @@ async def test_import_flow_minimum_fields( assert 
result["data"][CONF_VOLUME_STEP] == DEFAULT_VOLUME_STEP -async def test_import_flow_all_fields( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_flow_all_fields(hass: HomeAssistant) -> None: """Test import config flow with all fields.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -446,11 +407,8 @@ async def test_import_flow_all_fields( assert result["data"][CONF_VOLUME_STEP] == VOLUME_STEP -async def test_import_entity_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_entity_already_configured(hass: HomeAssistant) -> None: """Test entity is already configured during import setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -468,11 +426,8 @@ async def test_import_entity_already_configured( assert result["reason"] == "already_configured_device" -async def test_import_flow_update_options( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_options(hass: HomeAssistant) -> None: """Test import config flow with updated options.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -499,11 +454,8 @@ async def test_import_flow_update_options( assert config_entry.options[CONF_VOLUME_STEP] == VOLUME_STEP + 1 -async def test_import_flow_update_name_and_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_name_and_apps(hass: HomeAssistant) -> None: """Test import config flow with updated name and apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -533,11 +485,8 @@ async def test_import_flow_update_name_and_apps( assert config_entry.options[CONF_APPS] == {CONF_INCLUDE: [CURRENT_APP]} -async def test_import_flow_update_remove_apps( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_update_remove_apps(hass: HomeAssistant) -> None: """Test import config flow with removed apps.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -566,12 +515,10 @@ async def test_import_flow_update_remove_apps( assert CONF_APPS not in config_entry.options -async def test_import_needs_pairing( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_import_needs_pairing(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv during import.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, data=MOCK_TV_CONFIG_NO_TOKEN @@ -603,12 +550,10 @@ async def test_import_needs_pairing( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.TV -async def test_import_with_apps_needs_pairing( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_complete_pairing: 
pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_complete_pairing" +) +async def test_import_with_apps_needs_pairing(hass: HomeAssistant) -> None: """Test pairing config flow when access token not provided for tv but apps are included during import.""" import_config = MOCK_TV_CONFIG_NO_TOKEN.copy() import_config[CONF_APPS] = {CONF_INCLUDE: [CURRENT_APP]} @@ -646,11 +591,8 @@ async def test_import_with_apps_needs_pairing( assert result["data"][CONF_APPS][CONF_INCLUDE] == [CURRENT_APP] -async def test_import_flow_additional_configs( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_update") +async def test_import_flow_additional_configs(hass: HomeAssistant) -> None: """Test import config flow with additional configs defined in CONF_APPS.""" result = await hass.config_entries.flow.async_init( DOMAIN, @@ -666,10 +608,9 @@ async def test_import_flow_additional_configs( assert CONF_APPS not in config_entry.options +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") async def test_import_error( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test that error is logged when import config has an error.""" @@ -700,11 +641,8 @@ async def test_import_error( assert len(vizio_log_list) == 1 -async def test_import_ignore( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup") +async def test_import_ignore(hass: HomeAssistant) -> None: """Test import config flow doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -723,12 +661,10 @@ async def test_import_ignore( assert result["type"] is FlowResultType.CREATE_ENTRY -async def test_zeroconf_flow( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow(hass: HomeAssistant) -> None: """Test zeroconf config flow.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -760,12 +696,10 @@ async def test_zeroconf_flow( assert result["data"][CONF_DEVICE_CLASS] == MediaPlayerDeviceClass.SPEAKER -async def test_zeroconf_flow_already_configured( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow_already_configured(hass: HomeAssistant) -> None: """Test entity is already configured during zeroconf setup.""" entry = MockConfigEntry( domain=DOMAIN, @@ -786,12 +720,10 @@ async def test_zeroconf_flow_already_configured( assert result["reason"] == "already_configured" -async def test_zeroconf_flow_with_port_in_host( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_flow_with_port_in_host(hass: HomeAssistant) -> None: 
"""Test entity is already configured during zeroconf setup when port is in host.""" entry = MockConfigEntry( domain=DOMAIN, @@ -814,12 +746,10 @@ async def test_zeroconf_flow_with_port_in_host( assert result["reason"] == "already_configured" -async def test_zeroconf_dupe_fail( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_dupe_fail(hass: HomeAssistant) -> None: """Test zeroconf config flow when device gets discovered multiple times.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) result = await hass.config_entries.flow.async_init( @@ -840,12 +770,10 @@ async def test_zeroconf_dupe_fail( assert result["reason"] == "already_in_progress" -async def test_zeroconf_ignore( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_ignore(hass: HomeAssistant) -> None: """Test zeroconf discovery doesn't throw an error when there's an existing ignored source.""" entry = MockConfigEntry( domain=DOMAIN, @@ -863,11 +791,8 @@ async def test_zeroconf_ignore( assert result["type"] is FlowResultType.FORM -async def test_zeroconf_no_unique_id( - hass: HomeAssistant, - vizio_guess_device_type: pytest.fixture, - vizio_no_unique_id: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_guess_device_type", "vizio_no_unique_id") +async def test_zeroconf_no_unique_id(hass: HomeAssistant) -> None: """Test zeroconf discovery aborts when unique_id is None.""" discovery_info = dataclasses.replace(MOCK_ZEROCONF_SERVICE_INFO) @@ -879,12 +804,10 @@ async def test_zeroconf_no_unique_id( assert result["reason"] == "cannot_connect" -async def test_zeroconf_abort_when_ignored( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_setup", "vizio_guess_device_type" +) +async def test_zeroconf_abort_when_ignored(hass: HomeAssistant) -> None: """Test zeroconf discovery aborts when the same host has been ignored.""" entry = MockConfigEntry( domain=DOMAIN, @@ -904,13 +827,13 @@ async def test_zeroconf_abort_when_ignored( assert result["reason"] == "already_configured" -async def test_zeroconf_flow_already_configured_hostname( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_hostname_check: pytest.fixture, - vizio_guess_device_type: pytest.fixture, -) -> None: +@pytest.mark.usefixtures( + "vizio_connect", + "vizio_bypass_setup", + "vizio_hostname_check", + "vizio_guess_device_type", +) +async def test_zeroconf_flow_already_configured_hostname(hass: HomeAssistant) -> None: """Test entity is already configured during zeroconf setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" @@ -933,12 +856,8 @@ async def test_zeroconf_flow_already_configured_hostname( assert result["reason"] == "already_configured" -async def test_import_flow_already_configured_hostname( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_setup: pytest.fixture, - vizio_hostname_check: pytest.fixture, -) -> None: 
+@pytest.mark.usefixtures("vizio_connect", "vizio_bypass_setup", "vizio_hostname_check") +async def test_import_flow_already_configured_hostname(hass: HomeAssistant) -> None: """Test entity is already configured during import setup when existing entry uses hostname.""" config = MOCK_SPEAKER_CONFIG.copy() config[CONF_HOST] = "hostname" diff --git a/tests/components/vizio/test_init.py b/tests/components/vizio/test_init.py index eba5af437b1..c2b19377809 100644 --- a/tests/components/vizio/test_init.py +++ b/tests/components/vizio/test_init.py @@ -15,11 +15,8 @@ from .const import MOCK_SPEAKER_CONFIG, MOCK_USER_VALID_TV_CONFIG, UNIQUE_ID from tests.common import MockConfigEntry, async_fire_time_changed -async def test_setup_component( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_setup_component(hass: HomeAssistant) -> None: """Test component setup.""" assert await async_setup_component( hass, DOMAIN, {DOMAIN: MOCK_USER_VALID_TV_CONFIG} @@ -28,11 +25,8 @@ async def test_setup_component( assert len(hass.states.async_entity_ids(Platform.MEDIA_PLAYER)) == 1 -async def test_tv_load_and_unload( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_tv_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading TV entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -52,11 +46,8 @@ async def test_tv_load_and_unload( assert DOMAIN not in hass.data -async def test_speaker_load_and_unload( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_load_and_unload(hass: HomeAssistant) -> None: """Test loading and unloading speaker entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -76,11 +67,11 @@ async def test_speaker_load_and_unload( assert DOMAIN not in hass.data +@pytest.mark.usefixtures( + "vizio_connect", "vizio_bypass_update", "vizio_data_coordinator_update_failure" +) async def test_coordinator_update_failure( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_bypass_update: pytest.fixture, - vizio_data_coordinator_update_failure: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test coordinator update failure after 10 days.""" diff --git a/tests/components/vizio/test_media_player.py b/tests/components/vizio/test_media_player.py index 52a5732706d..12e19077c8e 100644 --- a/tests/components/vizio/test_media_player.py +++ b/tests/components/vizio/test_media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from datetime import timedelta from typing import Any @@ -129,7 +130,7 @@ def _get_attr_and_assert_base_attr( @asynccontextmanager async def _cm_for_test_setup_without_apps( all_settings: dict[str, Any], vizio_power_state: bool | None -) -> None: +) -> AsyncIterator[None]: """Context manager to setup test for Vizio devices without including app specific patches.""" with ( patch( @@ -211,7 +212,7 @@ async def _test_setup_speaker( @asynccontextmanager async def _cm_for_test_setup_tv_with_apps( hass: HomeAssistant, device_config: dict[str, Any], app_config: dict[str, Any] -) -> None: 
+) -> AsyncIterator[None]: """Context manager to setup test for Vizio TV with support for apps.""" config_entry = MockConfigEntry( domain=DOMAIN, data=vol.Schema(VIZIO_SCHEMA)(device_config), unique_id=UNIQUE_ID @@ -280,63 +281,46 @@ async def _test_service( assert service_call.call_args == call(*args, **kwargs) -async def test_speaker_on( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_on(hass: HomeAssistant) -> None: """Test Vizio Speaker entity setup when on.""" await _test_setup_speaker(hass, True) -async def test_speaker_off( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_speaker_off(hass: HomeAssistant) -> None: """Test Vizio Speaker entity setup when off.""" await _test_setup_speaker(hass, False) +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_speaker_unavailable( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, ) -> None: """Test Vizio Speaker entity setup when unavailable.""" await _test_setup_speaker(hass, None) -async def test_init_tv_on( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_on(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when on.""" await _test_setup_tv(hass, True) -async def test_init_tv_off( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_off(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when off.""" await _test_setup_tv(hass, False) -async def test_init_tv_unavailable( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_init_tv_unavailable(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when unavailable.""" await _test_setup_tv(hass, None) -async def test_setup_unavailable_speaker( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture -) -> None: +@pytest.mark.usefixtures("vizio_cant_connect") +async def test_setup_unavailable_speaker(hass: HomeAssistant) -> None: """Test speaker entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_SPEAKER_CONFIG, unique_id=UNIQUE_ID @@ -346,9 +330,8 @@ async def test_setup_unavailable_speaker( assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -async def test_setup_unavailable_tv( - hass: HomeAssistant, vizio_cant_connect: pytest.fixture -) -> None: +@pytest.mark.usefixtures("vizio_cant_connect") +async def test_setup_unavailable_tv(hass: HomeAssistant) -> None: """Test TV entity sets up as unavailable.""" config_entry = MockConfigEntry( domain=DOMAIN, data=MOCK_USER_VALID_TV_CONFIG, unique_id=UNIQUE_ID @@ -358,11 +341,8 @@ async def test_setup_unavailable_tv( assert hass.states.get("media_player.vizio").state == STATE_UNAVAILABLE -async def test_services( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_services(hass: HomeAssistant) -> None: """Test all Vizio media player entity services.""" await 
_test_setup_tv(hass, True) @@ -449,11 +429,8 @@ async def test_services( await _test_service(hass, MP_DOMAIN, "pause", SERVICE_MEDIA_PAUSE, None) -async def test_options_update( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_options_update(hass: HomeAssistant) -> None: """Test when config entry update event fires.""" await _test_setup_speaker(hass, True) config_entry = hass.config_entries.async_entries(DOMAIN)[0] @@ -476,7 +453,7 @@ async def _test_update_availability_switch( hass: HomeAssistant, initial_power_state: bool | None, final_power_state: bool | None, - caplog: pytest.fixture, + caplog: pytest.LogCaptureFixture, ) -> None: now = dt_util.utcnow() future_interval = timedelta(minutes=1) @@ -516,30 +493,27 @@ async def _test_update_availability_switch( assert len(vizio_log_list) == 1 +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_unavailable_to_available( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes available after being unavailable.""" await _test_update_availability_switch(hass, None, True, caplog) +@pytest.mark.usefixtures("vizio_connect", "vizio_update") async def test_update_available_to_unavailable( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device becomes unavailable after being available.""" await _test_update_availability_switch(hass, True, None, caplog) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps.""" @@ -564,10 +538,9 @@ async def test_setup_with_apps( ) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_include( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["include"] in config.""" @@ -582,10 +555,9 @@ async def test_setup_with_apps_include( assert "app_id" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_exclude( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["exclude"] in config.""" @@ -600,10 +572,9 @@ async def test_setup_with_apps_exclude( assert "app_id" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_apps_additional_apps_config( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps and apps["additional_configs"] in config.""" @@ -679,10 +650,9 @@ def test_invalid_apps_config(hass: HomeAssistant) -> None: vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE) +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_unknown_app_config( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: 
"""Test device setup with apps where app config returned is unknown.""" @@ -696,10 +666,9 @@ async def test_setup_with_unknown_app_config( assert attr["app_id"] == UNKNOWN_APP_CONFIG +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_setup_with_no_running_app( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -713,11 +682,8 @@ async def test_setup_with_no_running_app( assert "app_name" not in attr -async def test_setup_tv_without_mute( - hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update: pytest.fixture, -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update") +async def test_setup_tv_without_mute(hass: HomeAssistant) -> None: """Test Vizio TV entity setup when mute property isn't returned by Vizio API.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -737,10 +703,9 @@ async def test_setup_tv_without_mute( assert "is_volume_muted" not in attr +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps") async def test_apps_update( hass: HomeAssistant, - vizio_connect: pytest.fixture, - vizio_update_with_apps: pytest.fixture, caplog: pytest.LogCaptureFixture, ) -> None: """Test device setup with apps where no app is running.""" @@ -772,9 +737,8 @@ async def test_apps_update( assert len(apps) == len(APP_LIST) -async def test_vizio_update_with_apps_on_input( - hass: HomeAssistant, vizio_connect, vizio_update_with_apps_on_input -) -> None: +@pytest.mark.usefixtures("vizio_connect", "vizio_update_with_apps_on_input") +async def test_vizio_update_with_apps_on_input(hass: HomeAssistant) -> None: """Test a vizio TV with apps that is on a TV input.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index 6c292241237..c2978afc17f 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -43,9 +43,12 @@ async def test_pipeline( """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -98,8 +101,8 @@ async def test_pipeline( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -238,9 +241,12 @@ async def test_tts_timeout( """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -298,8 +304,8 @@ async def test_tts_timeout( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -361,9 +367,12 @@ async def test_tts_wrong_extension( """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything 
non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -403,8 +412,8 @@ async def test_tts_wrong_extension( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -456,9 +465,12 @@ async def test_tts_wrong_wav_format( """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -505,8 +517,8 @@ async def test_tts_wrong_wav_format( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -558,9 +570,12 @@ async def test_empty_tts_output( """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] @@ -591,8 +606,8 @@ async def test_empty_tts_output( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", diff --git a/tests/components/wake_on_lan/conftest.py b/tests/components/wake_on_lan/conftest.py index cec3076d83e..8a1cb3f41eb 100644 --- a/tests/components/wake_on_lan/conftest.py +++ b/tests/components/wake_on_lan/conftest.py @@ -2,14 +2,24 @@ from __future__ import annotations +from collections.abc import Generator +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest -from typing_extensions import Generator + +from homeassistant.components.wake_on_lan.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + +DEFAULT_MAC = "00:01:02:03:04:05" @pytest.fixture -def mock_send_magic_packet() -> AsyncMock: +def mock_send_magic_packet() -> Generator[AsyncMock]: """Mock magic packet.""" with patch("wakeonlan.send_magic_packet") as mock_send: yield mock_send @@ -27,3 +37,48 @@ def mock_subprocess_call(subprocess_call_return_value: int) -> Generator[MagicMo with patch("homeassistant.components.wake_on_lan.switch.sp.call") as mock_sp: mock_sp.return_value = subprocess_call_return_value yield mock_sp + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.wake_on_lan.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration.
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Wake on LAN integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title=f"Wake on LAN {DEFAULT_MAC}", + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/wake_on_lan/test_button.py b/tests/components/wake_on_lan/test_button.py new file mode 100644 index 00000000000..abcae686a1b --- /dev/null +++ b/tests/components/wake_on_lan/test_button.py @@ -0,0 +1,54 @@ +"""The tests for the wake on lan button platform.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.util import dt as dt_util + +from tests.common import MockConfigEntry + + +async def test_state( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + loaded_entry: MockConfigEntry, +) -> None: + """Test button default state.""" + + state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") + assert state is not None + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get("button.wake_on_lan_00_01_02_03_04_05") + assert entry + assert entry.unique_id == "00:01:02:03:04:05" + + +async def test_service_calls( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + loaded_entry: MockConfigEntry, + mock_send_magic_packet: AsyncMock, +) -> None: + """Test service call.""" + + now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") + freezer.move_to(now) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.wake_on_lan_00_01_02_03_04_05"}, + blocking=True, + ) + + assert ( + hass.states.get("button.wake_on_lan_00_01_02_03_04_05").state == now.isoformat() + ) diff --git a/tests/components/wake_on_lan/test_config_flow.py b/tests/components/wake_on_lan/test_config_flow.py new file mode 100644 index 00000000000..b565fba505e --- /dev/null +++ b/tests/components/wake_on_lan/test_config_flow.py @@ -0,0 +1,109 @@ +"""Test the Wake on LAN config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.wake_on_lan.const import DOMAIN +from homeassistant.const import CONF_BROADCAST_ADDRESS, CONF_BROADCAST_PORT, CONF_MAC +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import DEFAULT_MAC + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result =
await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + } + + await hass.async_block_till_done() + + assert loaded_entry.options == { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "192.168.255.255", + CONF_BROADCAST_PORT: 10, + } + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("button.wake_on_lan_00_01_02_03_04_05") + assert state is not None + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_MAC: DEFAULT_MAC, + CONF_BROADCAST_ADDRESS: "255.255.255.255", + CONF_BROADCAST_PORT: 9, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/wake_on_lan/test_init.py b/tests/components/wake_on_lan/test_init.py index 8cfb0e6491e..1784f8ef12d 100644 --- a/tests/components/wake_on_lan/test_init.py +++ b/tests/components/wake_on_lan/test_init.py @@ -8,9 +8,21 @@ import pytest import voluptuous as vol from homeassistant.components.wake_on_lan import DOMAIN, SERVICE_SEND_MAGIC_PACKET +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED + async def test_send_magic_packet(hass: HomeAssistant) -> None: """Test of send magic packet service call.""" diff --git a/tests/components/wake_on_lan/test_switch.py b/tests/components/wake_on_lan/test_switch.py index 77e1ba55519..9a478b46175 100644 --- a/tests/components/wake_on_lan/test_switch.py +++ b/tests/components/wake_on_lan/test_switch.py @@ -13,6 +13,7 @@ 
from homeassistant.const import ( STATE_ON, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component from tests.common import async_mock_service @@ -64,7 +65,7 @@ async def test_broadcast_config_ip_and_port( hass: HomeAssistant, mock_send_magic_packet: AsyncMock ) -> None: """Test with broadcast address and broadcast port config.""" - mac = "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" broadcast_address = "255.255.255.255" port = 999 @@ -92,6 +93,7 @@ async def test_broadcast_config_ip_and_port( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with( mac, ip_address=broadcast_address, port=port ) @@ -102,7 +104,7 @@ async def test_broadcast_config_ip( ) -> None: """Test with only broadcast address.""" - mac = "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" broadcast_address = "255.255.255.255" assert await async_setup_component( @@ -128,6 +130,7 @@ async def test_broadcast_config_ip( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, ip_address=broadcast_address) @@ -136,7 +139,7 @@ async def test_broadcast_config_port( ) -> None: """Test with only broadcast port config.""" - mac = "00-01-02-03-04-05" + mac = "00:01:02:03:04:05" port = 999 assert await async_setup_component( @@ -156,6 +159,7 @@ async def test_broadcast_config_port( blocking=True, ) + mac = dr.format_mac(mac) mock_send_magic_packet.assert_called_with(mac, port=port) diff --git a/tests/components/wake_word/test_init.py b/tests/components/wake_word/test_init.py index c19d3e7032f..cdaf7e0e3f0 100644 --- a/tests/components/wake_word/test_init.py +++ b/tests/components/wake_word/test_init.py @@ -1,14 +1,13 @@ """Test wake_word component setup.""" import asyncio -from collections.abc import AsyncIterable +from collections.abc import AsyncIterable, Generator from functools import partial from pathlib import Path from unittest.mock import patch from freezegun import freeze_time import pytest -from typing_extensions import Generator from homeassistant.components import wake_word from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigFlow diff --git a/tests/components/waqi/conftest.py b/tests/components/waqi/conftest.py index b2e1a7d77d4..75709d4f56e 100644 --- a/tests/components/waqi/conftest.py +++ b/tests/components/waqi/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the World Air Quality Index (WAQI) tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.waqi.const import CONF_STATION_NUMBER, DOMAIN from homeassistant.const import CONF_API_KEY diff --git a/tests/components/water_heater/conftest.py b/tests/components/water_heater/conftest.py index 619d5e5c359..df16e5cc6da 100644 --- a/tests/components/water_heater/conftest.py +++ b/tests/components/water_heater/conftest.py @@ -1,7 +1,8 @@ """Fixtures for water heater platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/watttime/test_diagnostics.py b/tests/components/watttime/test_diagnostics.py index 0526a64aedc..f4465a44d26 100644 --- a/tests/components/watttime/test_diagnostics.py +++ b/tests/components/watttime/test_diagnostics.py @@ -19,4 +19,4 @@ async def 
test_entry_diagnostics( """Test config entry diagnostics.""" assert await get_diagnostics_for_config_entry( hass, hass_client, config_entry - ) == snapshot(exclude=props("entry_id")) + ) == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/weather/__init__.py b/tests/components/weather/__init__.py index c24baad5237..2dbffbbd617 100644 --- a/tests/components/weather/__init__.py +++ b/tests/components/weather/__init__.py @@ -61,7 +61,7 @@ class MockWeatherTest(WeatherPlatform.MockWeather): async def create_entity( hass: HomeAssistant, - mock_weather: WeatherPlatform.MockWeather, + mock_weather: type[WeatherPlatform.MockWeather], manifest_extra: dict[str, Any] | None, **kwargs, ) -> WeatherPlatform.MockWeather: diff --git a/tests/components/weather/conftest.py b/tests/components/weather/conftest.py index e3e790300a0..78389381ff3 100644 --- a/tests/components/weather/conftest.py +++ b/tests/components/weather/conftest.py @@ -1,7 +1,8 @@ """Fixtures for Weather platform tests.""" +from collections.abc import Generator + import pytest -from typing_extensions import Generator from homeassistant.config_entries import ConfigFlow from homeassistant.core import HomeAssistant diff --git a/tests/components/weatherflow/conftest.py b/tests/components/weatherflow/conftest.py index c0811597228..21c251d39b5 100644 --- a/tests/components/weatherflow/conftest.py +++ b/tests/components/weatherflow/conftest.py @@ -1,12 +1,12 @@ """Fixtures for Weatherflow integration tests.""" import asyncio +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest from pyweatherflowudp.client import EVENT_DEVICE_DISCOVERED from pyweatherflowudp.device import WeatherFlowDevice -from typing_extensions import Generator from homeassistant.components.weatherflow.const import DOMAIN diff --git a/tests/components/weatherflow_cloud/__init__.py b/tests/components/weatherflow_cloud/__init__.py index c251e7868cc..31004a27f64 100644 --- a/tests/components/weatherflow_cloud/__init__.py +++ b/tests/components/weatherflow_cloud/__init__.py @@ -1 +1,13 @@ """Tests for the WeatherflowCloud integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index d47da3c7d1b..36b42bf24a8 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ b/tests/components/weatherflow_cloud/conftest.py @@ -1,10 +1,19 @@ """Common fixtures for the WeatherflowCloud tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch from aiohttp import ClientResponseError import pytest -from typing_extensions import Generator +from weatherflow4py.models.rest.forecast import WeatherDataForecastREST +from weatherflow4py.models.rest.observation import ObservationStationREST +from weatherflow4py.models.rest.stations import StationsResponseREST +from weatherflow4py.models.rest.unified import WeatherFlowDataREST + +from homeassistant.components.weatherflow_cloud.const import DOMAIN +from homeassistant.const import CONF_API_TOKEN + +from tests.common import MockConfigEntry, load_fixture @pytest.fixture @@ -56,3 +65,51 @@ def 
mock_get_stations_401_error() -> Generator[AsyncMock]: side_effect=side_effects, ) as mock_get_stations: yield mock_get_stations + + +MOCK_API_TOKEN = "1234567890" + + +@pytest.fixture +async def mock_config_entry() -> MockConfigEntry: + """Fixture for MockConfigEntry.""" + return MockConfigEntry( + domain=DOMAIN, + data={CONF_API_TOKEN: MOCK_API_TOKEN}, + version=1, + ) + + +@pytest.fixture +def mock_api(): + """Fixture for Mock WeatherFlowRestAPI.""" + get_stations_response_data = StationsResponseREST.from_json( + load_fixture("stations.json", DOMAIN) + ) + get_forecast_response_data = WeatherDataForecastREST.from_json( + load_fixture("forecast.json", DOMAIN) + ) + get_observation_response_data = ObservationStationREST.from_json( + load_fixture("station_observation.json", DOMAIN) + ) + + data = { + 24432: WeatherFlowDataREST( + weather=get_forecast_response_data, + observation=get_observation_response_data, + station=get_stations_response_data.stations[0], + device_observations=None, + ) + } + + with patch( + "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", + autospec=True, + ) as mock_api_class: + # Create an instance of AsyncMock for the API + mock_api = AsyncMock() + mock_api.get_all_data.return_value = data + # Patch the class to return our mock_api instance + mock_api_class.return_value = mock_api + + yield mock_api diff --git a/tests/components/weatherflow_cloud/fixtures/forecast.json b/tests/components/weatherflow_cloud/fixtures/forecast.json new file mode 100644 index 00000000000..62793983327 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/forecast.json @@ -0,0 +1,4783 @@ +{ + "current_conditions": { + "air_density": 1.0, + "air_temperature": 4.0, + "brightness": 59768, + "conditions": "Clear", + "delta_t": 6.0, + "dew_point": -13.0, + "feels_like": 3.0, + "icon": "clear-day", + "is_precip_local_day_rain_check": true, + "is_precip_local_yesterday_rain_check": true, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 39, + "lightning_strike_last_distance_msg": "37 - 41 km", + "lightning_strike_last_epoch": 1698522523, + "precip_accum_local_day": 0, + "precip_accum_local_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "pressure_trend": "rising", + "relative_humidity": 27, + "sea_level_pressure": 1022.1, + "solar_radiation": 498, + "station_pressure": 795.8, + "time": 1703785918, + "uv": 2, + "wet_bulb_globe_temperature": 2.0, + "wet_bulb_temperature": -1.0, + "wind_avg": 2.0, + "wind_direction": 40, + "wind_direction_cardinal": "NE", + "wind_gust": 4.0 + }, + "forecast": { + "daily": [ + { + "air_temp_high": 5.0, + "air_temp_low": -6.0, + "conditions": "Clear", + "day_num": 28, + "day_start_local": 1703746800, + "icon": "clear-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "sunrise": 1703773057, + "sunset": 1703807070 + }, + { + "air_temp_high": 7.0, + "air_temp_low": -1.0, + "conditions": "Clear", + "day_num": 29, + "day_start_local": 1703833200, + "icon": "clear-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "sunrise": 1703859473, + "sunset": 1703893513 + }, + { + "air_temp_high": 10.0, + "air_temp_low": -1.0, + "conditions": "Partly Cloudy", + "day_num": 30, + "day_start_local": 1703919600, + "icon": "partly-cloudy-day", + "month_num": 12, + "precip_icon": "chance-rain", + "precip_probability": 0, + 
"precip_type": "rain", + "sunrise": 1703945887, + "sunset": 1703979957 + }, + { + "air_temp_high": 2.0, + "air_temp_low": -3.0, + "conditions": "Partly Cloudy", + "day_num": 31, + "day_start_local": 1704006000, + "icon": "partly-cloudy-day", + "month_num": 12, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704032299, + "sunset": 1704066403 + }, + { + "air_temp_high": 5.0, + "air_temp_low": -4.0, + "conditions": "Partly Cloudy", + "day_num": 1, + "day_start_local": 1704092400, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704118709, + "sunset": 1704152851 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -4.0, + "conditions": "Partly Cloudy", + "day_num": 2, + "day_start_local": 1704178800, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704205116, + "sunset": 1704239300 + }, + { + "air_temp_high": 3.0, + "air_temp_low": -5.0, + "conditions": "Partly Cloudy", + "day_num": 3, + "day_start_local": 1704265200, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704291522, + "sunset": 1704325751 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -4.0, + "conditions": "Wintry Mix Possible", + "day_num": 4, + "day_start_local": 1704351600, + "icon": "possibly-sleet-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 20, + "precip_type": "sleet", + "sunrise": 1704377925, + "sunset": 1704412203 + }, + { + "air_temp_high": 1.0, + "air_temp_low": -5.0, + "conditions": "Partly Cloudy", + "day_num": 5, + "day_start_local": 1704438000, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704464327, + "sunset": 1704498656 + }, + { + "air_temp_high": 4.0, + "air_temp_low": -5.0, + "conditions": "Partly Cloudy", + "day_num": 6, + "day_start_local": 1704524400, + "icon": "partly-cloudy-day", + "month_num": 1, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "sunrise": 1704550726, + "sunset": 1704585111 + } + ], + "hourly": [ + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": -1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 50, + "sea_level_pressure": 1021.3, + "time": 1703786400, + "uv": 4.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 12.0 + }, + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": 0.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 50, + "sea_level_pressure": 1020.5, + "time": 1703790000, + "uv": 5.0, + "wind_avg": 7.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 0.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 49, + "sea_level_pressure": 1019.3, + "time": 1703793600, + 
"uv": 5.0, + "wind_avg": 7.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1018.9, + "time": 1703797200, + "uv": 4.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 46, + "sea_level_pressure": 1019.9, + "time": 1703800800, + "uv": 3.0, + "wind_avg": 8.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 11.0 + }, + { + "air_temperature": 4.0, + "conditions": "Clear", + "feels_like": -1.0, + "icon": "clear-day", + "local_day": 28, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 52, + "sea_level_pressure": 1021.9, + "time": 1703804400, + "uv": 1.0, + "wind_avg": 6.0, + "wind_direction": 340, + "wind_direction_cardinal": "NNW", + "wind_gust": 9.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1025.4, + "time": 1703808000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1026.1, + "time": 1703811600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 65, + "sea_level_pressure": 1026.6, + "time": 1703815200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1026.6, + "time": 1703818800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1026.7, + "time": 1703822400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + 
"wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.6, + "time": 1703826000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 28, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.7, + "time": 1703829600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1026.2, + "time": 1703833200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1025.9, + "time": 1703836800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 62, + "sea_level_pressure": 1026.1, + "time": 1703840400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 61, + "sea_level_pressure": 1026.0, + "time": 1703844000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1025.9, + "time": 1703847600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 67, + "sea_level_pressure": 1026.3, + "time": 1703851200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + 
"air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 64, + "sea_level_pressure": 1026.8, + "time": 1703854800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 2.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 60, + "sea_level_pressure": 1027.3, + "time": 1703858400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 2.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 48, + "sea_level_pressure": 1026.2, + "time": 1703862000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 6.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 45, + "sea_level_pressure": 1023.4, + "time": 1703865600, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 2.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1021.9, + "time": 1703869200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 6.0 + }, + { + "air_temperature": 6.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 44, + "sea_level_pressure": 1020.8, + "time": 1703872800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 4.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 42, + "sea_level_pressure": 1019.3, + "time": 1703876400, + "uv": 5.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 40, + "sea_level_pressure": 1018.1, + "time": 1703880000, + "uv": 5.0, + "wind_avg": 4.0, + "wind_direction": 0, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": 
"clear-day", + "local_day": 29, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 40, + "sea_level_pressure": 1017.8, + "time": 1703883600, + "uv": 4.0, + "wind_avg": 3.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 5.0 + }, + { + "air_temperature": 7.0, + "conditions": "Clear", + "feels_like": 5.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 41, + "sea_level_pressure": 1018.0, + "time": 1703887200, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 180, + "wind_direction_cardinal": "S", + "wind_gust": 4.0 + }, + { + "air_temperature": 5.0, + "conditions": "Clear", + "feels_like": 3.0, + "icon": "clear-day", + "local_day": 29, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 46, + "sea_level_pressure": 1018.8, + "time": 1703890800, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 180, + "wind_direction_cardinal": "S", + "wind_gust": 4.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-night", + "local_day": 29, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 55, + "sea_level_pressure": 1020.6, + "time": 1703894400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": 1.0, + "conditions": "Clear", + "feels_like": -2.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 62, + "sea_level_pressure": 1020.7, + "time": 1703898000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1020.7, + "time": 1703901600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Clear", + "feels_like": -3.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1020.8, + "time": 1703905200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 72, + "sea_level_pressure": 1020.3, + "time": 1703908800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -4.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 22, + 
"precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 73, + "sea_level_pressure": 1019.9, + "time": 1703912400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 29, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1019.4, + "time": 1703916000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 75, + "sea_level_pressure": 1019.0, + "time": 1703919600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 75, + "sea_level_pressure": 1018.5, + "time": 1703923200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1018.1, + "time": 1703926800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1017.7, + "time": 1703930400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 74, + "sea_level_pressure": 1017.4, + "time": 1703934000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Clear", + "feels_like": -5.0, + "icon": "clear-night", + "local_day": 30, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 73, + "sea_level_pressure": 1017.0, + "time": 1703937600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 6, + "precip": 0, + "precip_icon": 
"chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 72, + "sea_level_pressure": 1016.8, + "time": 1703941200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 69, + "sea_level_pressure": 1016.5, + "time": 1703944800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1016.3, + "time": 1703948400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 55, + "sea_level_pressure": 1015.0, + "time": 1703952000, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 6.0, + "conditions": "Partly Cloudy", + "feels_like": 3.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 45, + "sea_level_pressure": 1013.7, + "time": 1703955600, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 8.0, + "conditions": "Partly Cloudy", + "feels_like": 5.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 39, + "sea_level_pressure": 1012.4, + "time": 1703959200, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 9.0, + "conditions": "Partly Cloudy", + "feels_like": 7.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 36, + "sea_level_pressure": 1011.5, + "time": 1703962800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 10.0, + "conditions": "Partly Cloudy", + "feels_like": 8.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 35, + "sea_level_pressure": 1010.7, + "time": 1703966400, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 9.0, + "conditions": "Partly Cloudy", + "feels_like": 7.0, + "icon": "partly-cloudy-day", + "local_day": 30, + 
"local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 35, + "sea_level_pressure": 1009.8, + "time": 1703970000, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 5.0 + }, + { + "air_temperature": 8.0, + "conditions": "Partly Cloudy", + "feels_like": 6.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 38, + "sea_level_pressure": 1010.7, + "time": 1703973600, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": 6.0, + "conditions": "Partly Cloudy", + "feels_like": 4.0, + "icon": "partly-cloudy-day", + "local_day": 30, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 43, + "sea_level_pressure": 1011.6, + "time": 1703977200, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 3.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 48, + "sea_level_pressure": 1012.5, + "time": 1703980800, + "uv": 2.0, + "wind_avg": 2.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 3.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 53, + "sea_level_pressure": 1013.1, + "time": 1703984400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 56, + "sea_level_pressure": 1013.7, + "time": 1703988000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 60, + "sea_level_pressure": 1014.4, + "time": 1703991600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 63, + "sea_level_pressure": 1014.7, + "time": 1703995200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": 
-2.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 66, + "sea_level_pressure": 1015.1, + "time": 1703998800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-night", + "local_day": 30, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-snow", + "precip_probability": 0, + "precip_type": "snow", + "relative_humidity": 67, + "sea_level_pressure": 1015.5, + "time": 1704002400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1015.3, + "time": 1704006000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 2.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1015.0, + "time": 1704009600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1014.7, + "time": 1704013200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1015.1, + "time": 1704016800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1015.5, + "time": 1704020400, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1015.9, + "time": 1704024000, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 3.0 + }, + { + 
"air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1016.7, + "time": 1704027600, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1017.4, + "time": 1704031200, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1018.2, + "time": 1704034800, + "uv": 0.0, + "wind_avg": 2.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 3.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1018.2, + "time": 1704038400, + "uv": 1.0, + "wind_avg": 2.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 4.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 64, + "sea_level_pressure": 1018.2, + "time": 1704042000, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 4.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1018.1, + "time": 1704045600, + "uv": 1.0, + "wind_avg": 3.0, + "wind_direction": 120, + "wind_direction_cardinal": "ESE", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1017.6, + "time": 1704049200, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1017.0, + "time": 1704052800, + "uv": 3.0, + "wind_avg": 3.0, + 
"wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1016.4, + "time": 1704056400, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 100, + "wind_direction_cardinal": "E", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1017.9, + "time": 1704060000, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 5.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 31, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1019.4, + "time": 1704063600, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 4.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1021.0, + "time": 1704067200, + "uv": 2.0, + "wind_avg": 3.0, + "wind_direction": 130, + "wind_direction_cardinal": "SE", + "wind_gust": 4.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 72, + "sea_level_pressure": 1021.8, + "time": 1704070800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1022.7, + "time": 1704074400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + "sea_level_pressure": 1023.6, + "time": 1704078000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, 
+ "sea_level_pressure": 1023.6, + "time": 1704081600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1023.6, + "time": 1704085200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 31, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1023.6, + "time": 1704088800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1024.0, + "time": 1704092400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1024.5, + "time": 1704096000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1024.9, + "time": 1704099600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1024.8, + "time": 1704103200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1024.6, + "time": 1704106800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", 
+ "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1024.5, + "time": 1704110400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 4.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1024.4, + "time": 1704114000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1024.4, + "time": 1704117600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 79, + "sea_level_pressure": 1024.4, + "time": 1704121200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1022.7, + "time": 1704124800, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1021.1, + "time": 1704128400, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1019.5, + "time": 1704132000, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 49, + "sea_level_pressure": 1018.5, + "time": 1704135600, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + 
"local_hour": 13, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 47, + "sea_level_pressure": 1017.4, + "time": 1704139200, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 5.0, + "conditions": "Partly Cloudy", + "feels_like": 2.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-rain", + "precip_probability": 0, + "precip_type": "rain", + "relative_humidity": 48, + "sea_level_pressure": 1016.4, + "time": 1704142800, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 170, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 1.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 50, + "sea_level_pressure": 1017.7, + "time": 1704146400, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 1, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1018.9, + "time": 1704150000, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 5.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1020.2, + "time": 1704153600, + "uv": 3.0, + "wind_avg": 3.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 5.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 66, + "sea_level_pressure": 1020.8, + "time": 1704157200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1021.4, + "time": 1704160800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 75, + "sea_level_pressure": 1022.0, + "time": 1704164400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + 
"feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.9, + "time": 1704168000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.7, + "time": 1704171600, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 1, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1021.6, + "time": 1704175200, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1020.8, + "time": 1704178800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1020.1, + "time": 1704182400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1019.3, + "time": 1704186000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1019.0, + "time": 1704189600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.7, + "time": 1704193200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + 
"wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.4, + "time": 1704196800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 76, + "sea_level_pressure": 1018.5, + "time": 1704200400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1018.7, + "time": 1704204000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1018.9, + "time": 1704207600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1018.2, + "time": 1704211200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1017.5, + "time": 1704214800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1016.8, + "time": 1704218400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 350, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1015.7, + "time": 1704222000, + "uv": 3.0, + "wind_avg": 
4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1014.7, + "time": 1704225600, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 7.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1013.6, + "time": 1704229200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 10, + "wind_direction_cardinal": "N", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 56, + "sea_level_pressure": 1014.8, + "time": 1704232800, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 2, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1016.1, + "time": 1704236400, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1017.4, + "time": 1704240000, + "uv": 2.0, + "wind_avg": 4.0, + "wind_direction": 360, + "wind_direction_cardinal": "N", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1017.7, + "time": 1704243600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.1, + "time": 1704247200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + 
"sea_level_pressure": 1018.5, + "time": 1704250800, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 310, + "wind_direction_cardinal": "NW", + "wind_gust": 5.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1018.4, + "time": 1704254400, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1018.4, + "time": 1704258000, + "uv": 0.0, + "wind_avg": 3.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 2, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1018.4, + "time": 1704261600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1018.4, + "time": 1704265200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1018.3, + "time": 1704268800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1018.3, + "time": 1704272400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1018.1, + "time": 1704276000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", 
+ "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 97, + "sea_level_pressure": 1017.8, + "time": 1704279600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1017.6, + "time": 1704283200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 5.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 96, + "sea_level_pressure": 1017.7, + "time": 1704286800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1017.8, + "time": 1704290400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1017.9, + "time": 1704294000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1016.1, + "time": 1704297600, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1014.3, + "time": 1704301200, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1012.5, + "time": 1704304800, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 210, + "wind_direction_cardinal": "SSW", + "wind_gust": 6.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 3, 
+ "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + "sea_level_pressure": 1011.4, + "time": 1704308400, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1010.3, + "time": 1704312000, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1009.1, + "time": 1704315600, + "uv": 4.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + "sea_level_pressure": 1010.4, + "time": 1704319200, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 3, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1011.7, + "time": 1704322800, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1012.9, + "time": 1704326400, + "uv": 3.0, + "wind_avg": 4.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 6.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 74, + "sea_level_pressure": 1013.2, + "time": 1704330000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 79, + "sea_level_pressure": 1013.5, + "time": 1704333600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + 
"feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 82, + "sea_level_pressure": 1013.8, + "time": 1704337200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1014.0, + "time": 1704340800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1014.1, + "time": 1704344400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 3, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1014.3, + "time": 1704348000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1014.6, + "time": 1704351600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 87, + "sea_level_pressure": 1015.0, + "time": 1704355200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1015.3, + "time": 1704358800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1015.7, + "time": 1704362400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 
7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1016.0, + "time": 1704366000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1016.4, + "time": 1704369600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1016.9, + "time": 1704373200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 84, + "sea_level_pressure": 1017.4, + "time": 1704376800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1018.0, + "time": 1704380400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 72, + "sea_level_pressure": 1016.3, + "time": 1704384000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1014.6, + "time": 1704387600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 58, + "sea_level_pressure": 1013.0, + "time": 1704391200, + "uv": 1.0, + "wind_avg": 5.0, + 
"wind_direction": 280, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 54, + "sea_level_pressure": 1011.6, + "time": 1704394800, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1010.2, + "time": 1704398400, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1008.8, + "time": 1704402000, + "uv": 4.0, + "wind_avg": 5.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1009.6, + "time": 1704405600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 4, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 60, + "sea_level_pressure": 1010.3, + "time": 1704409200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1011.0, + "time": 1704412800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 330, + "wind_direction_cardinal": "NNW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 70, + "sea_level_pressure": 1011.2, + "time": 1704416400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 75, + 
"sea_level_pressure": 1011.4, + "time": 1704420000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 80, + "sea_level_pressure": 1011.6, + "time": 1704423600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 320, + "wind_direction_cardinal": "NW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 84, + "sea_level_pressure": 1011.7, + "time": 1704427200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1011.8, + "time": 1704430800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 4, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 15, + "precip_type": "sleet", + "relative_humidity": 88, + "sea_level_pressure": 1011.9, + "time": 1704434400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 300, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 10, + "precip_type": "sleet", + "relative_humidity": 90, + "sea_level_pressure": 1012.6, + "time": 1704438000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 92, + "sea_level_pressure": 1013.3, + "time": 1704441600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1014.0, + "time": 1704445200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 3, + "precip": 0, + "precip_icon": 
"chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1014.7, + "time": 1704448800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 96, + "sea_level_pressure": 1015.4, + "time": 1704452400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1016.1, + "time": 1704456000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 290, + "wind_direction_cardinal": "WNW", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 91, + "sea_level_pressure": 1015.9, + "time": 1704459600, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1015.7, + "time": 1704463200, + "uv": 1.0, + "wind_avg": 4.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1015.4, + "time": 1704466800, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1015.2, + "time": 1704470400, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 66, + "sea_level_pressure": 1015.0, + "time": 1704474000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": 
"partly-cloudy-day", + "local_day": 5, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 63, + "sea_level_pressure": 1014.7, + "time": 1704477600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 240, + "wind_direction_cardinal": "WSW", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1015.0, + "time": 1704481200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1015.2, + "time": 1704484800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 62, + "sea_level_pressure": 1015.5, + "time": 1704488400, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1015.7, + "time": 1704492000, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 5, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 67, + "sea_level_pressure": 1015.9, + "time": 1704495600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 70, + "sea_level_pressure": 1016.2, + "time": 1704499200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 190, + "wind_direction_cardinal": "S", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1016.1, + "time": 1704502800, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + 
"conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 77, + "sea_level_pressure": 1016.0, + "time": 1704506400, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 80, + "sea_level_pressure": 1015.8, + "time": 1704510000, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -8.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 85, + "sea_level_pressure": 1015.7, + "time": 1704513600, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 89, + "sea_level_pressure": 1015.6, + "time": 1704517200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 5, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 92, + "sea_level_pressure": 1015.5, + "time": 1704520800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 0, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 95, + "sea_level_pressure": 1015.6, + "time": 1704524400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 6.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 1, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1015.7, + "time": 1704528000, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 2, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 99, + "sea_level_pressure": 1015.7, + "time": 1704531600, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + 
"wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 3, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 100, + "sea_level_pressure": 1015.8, + "time": 1704535200, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -5.0, + "conditions": "Partly Cloudy", + "feels_like": -11.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 4, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 98, + "sea_level_pressure": 1015.9, + "time": 1704538800, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -4.0, + "conditions": "Partly Cloudy", + "feels_like": -10.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 5, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 93, + "sea_level_pressure": 1016.0, + "time": 1704542400, + "uv": 0.0, + "wind_avg": 4.0, + "wind_direction": 270, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -3.0, + "conditions": "Partly Cloudy", + "feels_like": -9.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 6, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 86, + "sea_level_pressure": 1015.8, + "time": 1704546000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 7, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 78, + "sea_level_pressure": 1015.7, + "time": 1704549600, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 8, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 69, + "sea_level_pressure": 1015.6, + "time": 1704553200, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 9, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 61, + "sea_level_pressure": 1015.5, + "time": 1704556800, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 10, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 56, + "sea_level_pressure": 1015.4, + "time": 
1704560400, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 11, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1015.2, + "time": 1704564000, + "uv": 1.0, + "wind_avg": 5.0, + "wind_direction": 260, + "wind_direction_cardinal": "W", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 12, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 51, + "sea_level_pressure": 1015.1, + "time": 1704567600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 13, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 51, + "sea_level_pressure": 1015.0, + "time": 1704571200, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 8.0 + }, + { + "air_temperature": 4.0, + "conditions": "Partly Cloudy", + "feels_like": 0.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 14, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 53, + "sea_level_pressure": 1014.8, + "time": 1704574800, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 3.0, + "conditions": "Partly Cloudy", + "feels_like": -1.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 15, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 55, + "sea_level_pressure": 1014.7, + "time": 1704578400, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -2.0, + "icon": "partly-cloudy-day", + "local_day": 6, + "local_hour": 16, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 57, + "sea_level_pressure": 1014.5, + "time": 1704582000, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 2.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 17, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + "relative_humidity": 59, + "sea_level_pressure": 1014.4, + "time": 1704585600, + "uv": 3.0, + "wind_avg": 5.0, + "wind_direction": 230, + "wind_direction_cardinal": "SW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -3.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 18, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 5, + "precip_type": "sleet", + 
"relative_humidity": 62, + "sea_level_pressure": 1013.9, + "time": 1704589200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 1.0, + "conditions": "Partly Cloudy", + "feels_like": -4.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 19, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 65, + "sea_level_pressure": 1013.4, + "time": 1704592800, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": 0.0, + "conditions": "Partly Cloudy", + "feels_like": -5.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 20, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 68, + "sea_level_pressure": 1012.9, + "time": 1704596400, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 21, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 71, + "sea_level_pressure": 1012.4, + "time": 1704600000, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -1.0, + "conditions": "Partly Cloudy", + "feels_like": -6.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 22, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 73, + "sea_level_pressure": 1011.9, + "time": 1704603600, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + }, + { + "air_temperature": -2.0, + "conditions": "Partly Cloudy", + "feels_like": -7.0, + "icon": "partly-cloudy-night", + "local_day": 6, + "local_hour": 23, + "precip": 0, + "precip_icon": "chance-sleet", + "precip_probability": 0, + "precip_type": "sleet", + "relative_humidity": 75, + "sea_level_pressure": 1011.4, + "time": 1704607200, + "uv": 0.0, + "wind_avg": 5.0, + "wind_direction": 250, + "wind_direction_cardinal": "WSW", + "wind_gust": 7.0 + } + ] + }, + "latitude": 43.94962, + "location_name": "My Home Station", + "longitude": -102.86831, + "source_id_conditions": 5, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver", + "timezone_offset_minutes": -420, + "units": { + "units_air_density": "kg/m3", + "units_brightness": "lux", + "units_distance": "km", + "units_other": "metric", + "units_precip": "mm", + "units_pressure": "mb", + "units_solar_radiation": "w/m2", + "units_temp": "c", + "units_wind": "mps" + } +} diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation.json b/tests/components/weatherflow_cloud/fixtures/station_observation.json new file mode 100644 index 00000000000..148b180df73 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/station_observation.json @@ -0,0 +1,100 @@ +{ + "elevation": 2063.150146484375, + "is_public": true, + "latitude": 43.94962, + "longitude": -102.86831, + "obs": [ + { + "air_density": 0.96139, + "air_temperature": 10.5, + "barometric_pressure": 782.8, + "brightness": 757, + "delta_t": 
8.4, + "dew_point": -10.4, + "feels_like": 10.5, + "heat_index": 10.5, + "lightning_strike_count": 0, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 26, + "lightning_strike_last_epoch": 1707346875, + "precip": 0.0, + "precip_accum_last_1hr": 0.0, + "precip_accum_local_day": 0.0, + "precip_accum_local_day_final": 0.0, + "precip_accum_local_yesterday": 0.0, + "precip_accum_local_yesterday_final": 0.0, + "precip_analysis_type_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "precip_minutes_local_yesterday_final": 0, + "pressure_trend": "steady", + "relative_humidity": 22, + "sea_level_pressure": 1006.2, + "solar_radiation": 6, + "station_pressure": 782.8, + "timestamp": 1708994629, + "uv": 0.03, + "wet_bulb_globe_temperature": 4.6, + "wet_bulb_temperature": 2.1, + "wind_avg": 1.4, + "wind_chill": 10.5, + "wind_direction": 203, + "wind_gust": 3.2, + "wind_lull": 0.3 + } + ], + "outdoor_keys": [ + "timestamp", + "air_temperature", + "barometric_pressure", + "station_pressure", + "pressure_trend", + "sea_level_pressure", + "relative_humidity", + "precip", + "precip_accum_last_1hr", + "precip_accum_local_day", + "precip_accum_local_day_final", + "precip_accum_local_yesterday_final", + "precip_minutes_local_day", + "precip_minutes_local_yesterday_final", + "wind_avg", + "wind_direction", + "wind_gust", + "wind_lull", + "solar_radiation", + "uv", + "brightness", + "lightning_strike_last_epoch", + "lightning_strike_last_distance", + "lightning_strike_count", + "lightning_strike_count_last_1hr", + "lightning_strike_count_last_3hr", + "feels_like", + "heat_index", + "wind_chill", + "dew_point", + "wet_bulb_temperature", + "wet_bulb_globe_temperature", + "delta_t", + "air_density" + ], + "public_name": "My Home Station", + "station_id": 24432, + "station_name": "My Home Station", + "station_units": { + "units_direction": "degrees", + "units_distance": "mi", + "units_other": "metric", + "units_precip": "in", + "units_pressure": "hpa", + "units_temp": "f", + "units_wind": "bft" + }, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver" +} diff --git a/tests/components/weatherflow_cloud/fixtures/stations.json b/tests/components/weatherflow_cloud/fixtures/stations.json new file mode 100644 index 00000000000..e0ca96bd240 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/stations.json @@ -0,0 +1,132 @@ +{ + "stations": [ + { + "created_epoch": 1658343273, + "devices": [ + { + "device_id": 7654321, + "device_meta": { + "agl": 1.8288, + "environment": "indoor", + "name": "HB-00068123", + "wifi_network_name": "" + }, + "device_type": "HB", + "firmware_revision": "177", + "hardware_revision": "1", + "location_id": 24432, + "serial_number": "HB-00068123" + }, + { + "device_id": 123456, + "device_meta": { + "agl": 1.8288, + "environment": "outdoor", + "name": "ST-11084623", + "wifi_network_name": "" + }, + "device_settings": { + "show_precip_final": true + }, + "device_type": "ST", + "firmware_revision": "172", + "hardware_revision": "1", + "location_id": 24432, + "serial_number": "ST-11084623" + } + ], + "is_local_mode": false, + "last_modified_epoch": 1658344464, + "latitude": 43.94962, + "location_id": 24432, + "longitude": -102.86831, + "name": "My Home Station", + "public_name": "My Home Station", + "station_id": 24432, + "station_items": [ + { + "device_id": 123456, + "item": "air_temperature_humidity", + "location_id": 24432, + 
"location_item_id": 657904, + "sort": 0, + "station_id": 24432, + "station_item_id": 657904 + }, + { + "device_id": 123456, + "item": "barometric_pressure", + "location_id": 24432, + "location_item_id": 657906, + "sort": 3, + "station_id": 24432, + "station_item_id": 657906 + }, + { + "device_id": 7654321, + "item": "diagnostics", + "location_id": 24432, + "location_item_id": 657912, + "station_id": 24432, + "station_item_id": 657912 + }, + { + "device_id": 123456, + "item": "diagnostics", + "location_id": 24432, + "location_item_id": 657913, + "sort": 6, + "station_id": 24432, + "station_item_id": 657913 + }, + { + "device_id": 123456, + "item": "light", + "location_id": 24432, + "location_item_id": 657908, + "sort": 2, + "station_id": 24432, + "station_item_id": 657908 + }, + { + "device_id": 123456, + "item": "lightning", + "location_id": 24432, + "location_item_id": 657905, + "sort": 4, + "station_id": 24432, + "station_item_id": 657905 + }, + { + "device_id": 123456, + "item": "rain", + "location_id": 24432, + "location_item_id": 657907, + "sort": 5, + "station_id": 24432, + "station_item_id": 657907 + }, + { + "device_id": 123456, + "item": "wind", + "location_id": 24432, + "location_item_id": 657909, + "sort": 1, + "station_id": 24432, + "station_item_id": 657909 + } + ], + "station_meta": { + "elevation": 2063.150146484375, + "share_with_wf": true, + "share_with_wu": true + }, + "timezone": "America/Denver", + "timezone_offset_minutes": -420 + } + ], + "status": { + "status_code": 0, + "status_message": "SUCCESS" + } +} diff --git a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..f7b635eb4fa --- /dev/null +++ b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr @@ -0,0 +1,1556 @@ +# serializer version: 1 +# name: test_all_entities[sensor.my_home_station_air_density-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_air_density', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 5, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Air density', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_density', + 'unique_id': '24432_air_density', + 'unit_of_measurement': 'kg/m³', + }) +# --- +# name: test_all_entities[sensor.my_home_station_air_density-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Air density', + 'state_class': , + 'unit_of_measurement': 'kg/m³', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_air_density', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.96139', + }) +# --- +# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 
'entity_category': None, + 'entity_id': 'sensor.my_home_station_atmospheric_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Atmospheric pressure', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'barometric_pressure', + 'unique_id': '24432_barometric_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Atmospheric pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_atmospheric_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '782.8', + }) +# --- +# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Atmospheric pressure', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sea_level_pressure', + 'unique_id': '24432_sea_level_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Atmospheric pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1006.2', + }) +# --- +# name: test_all_entities[sensor.my_home_station_dew_point-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_dew_point', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Dew point', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dew_point', + 'unique_id': '24432_dew_point', + 
'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_dew_point-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Dew point', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_dew_point', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-10.4', + }) +# --- +# name: test_all_entities[sensor.my_home_station_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_distance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Distance', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_distance', + 'unique_id': '24432_lightning_strike_last_distance', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'distance', + 'friendly_name': 'My Home Station Distance', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: test_all_entities[sensor.my_home_station_feels_like-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_feels_like', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Feels like', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'feels_like', + 'unique_id': '24432_feels_like', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_feels_like-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Feels like', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_feels_like', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_heat_index-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.my_home_station_heat_index', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heat index', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heat_index', + 'unique_id': '24432_heat_index', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_heat_index-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Heat index', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_heat_index', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count', + 'unique_id': '24432_lightning_strike_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_count', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count_last_1_hr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count last 1 hr', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_1hr', + 'unique_id': '24432_lightning_strike_count_last_1hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_1_hr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count last 1 hr', + 'state_class': , + }), + 'context': , + 'entity_id': 
'sensor.my_home_station_lightning_count_last_1_hr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Lightning count last 3 hr', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_3hr', + 'unique_id': '24432_lightning_strike_count_last_3hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_count_last_3_hr-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station Lightning count last 3 hr', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_count_last_3_hr', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_distance-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_last_distance', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lightning last distance', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_distance', + 'unique_id': '24432_lightning_strike_last_distance', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_distance-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'distance', + 'friendly_name': 'My Home Station Lightning last distance', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_last_distance', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '26', + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_strike-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_lightning_last_strike', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lightning last strike', + 'platform': 
'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_epoch', + 'unique_id': '24432_lightning_strike_last_epoch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_lightning_last_strike-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'timestamp', + 'friendly_name': 'My Home Station Lightning last strike', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_lightning_last_strike', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-07T23:01:15+00:00', + }) +# --- +# name: test_all_entities[sensor.my_home_station_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 5, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_density', + 'unique_id': '24432_air_density', + 'unit_of_measurement': 'kg/m³', + }) +# --- +# name: test_all_entities[sensor.my_home_station_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station None', + 'state_class': , + 'unit_of_measurement': 'kg/m³', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.96139', + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_none_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count', + 'unique_id': '24432_lightning_strike_count', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_none_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_none_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_1hr', + 'unique_id': '24432_lightning_strike_count_last_1hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_none_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_none_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_count_last_3hr', + 'unique_id': '24432_lightning_strike_count_last_3hr', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_none_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'friendly_name': 'My Home Station None', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_none_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_barometric-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_pressure_barometric', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure barometric', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'barometric_pressure', + 'unique_id': '24432_barometric_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_barometric-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Pressure barometric', + 'state_class': , + 'unit_of_measurement': , + }), 
+ 'context': , + 'entity_id': 'sensor.my_home_station_pressure_barometric', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '782.8', + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_sea_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_pressure_sea_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure sea level', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'sea_level_pressure', + 'unique_id': '24432_sea_level_pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_pressure_sea_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'atmospheric_pressure', + 'friendly_name': 'My Home Station Pressure sea level', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_pressure_sea_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1006.2', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'air_temperature', + 'unique_id': '24432_air_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dew_point', + 'unique_id': '24432_dew_point', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-10.4', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'feels_like', + 'unique_id': '24432_feels_like', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heat_index', + 'unique_id': '24432_heat_index', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_4', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_5', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wind_chill', + 'unique_id': '24432_wind_chill', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wet_bulb_temperature', + 'unique_id': '24432_wet_bulb_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.1', + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_7-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_temperature_7', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'wet_bulb_globe_temperature', + 'unique_id': '24432_wet_bulb_globe_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_temperature_7-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_temperature_7', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: test_all_entities[sensor.my_home_station_timestamp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_timestamp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lightning_strike_last_epoch', + 'unique_id': '24432_lightning_strike_last_epoch', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.my_home_station_timestamp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'timestamp', + 'friendly_name': 'My Home Station Timestamp', + }), + 'context': , + 'entity_id': 'sensor.my_home_station_timestamp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-02-07T23:01:15+00:00', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wet bulb globe temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wet_bulb_globe_temperature', + 'unique_id': '24432_wet_bulb_globe_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wet bulb globe temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wet_bulb_globe_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.6', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 
'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wet bulb temperature', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wet_bulb_temperature', + 'unique_id': '24432_wet_bulb_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wet_bulb_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wet bulb temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wet_bulb_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.1', + }) +# --- +# name: test_all_entities[sensor.my_home_station_wind_chill-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_home_station_wind_chill', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wind chill', + 'platform': 'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wind_chill', + 'unique_id': '24432_wind_chill', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.my_home_station_wind_chill-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'device_class': 'temperature', + 'friendly_name': 'My Home Station Wind chill', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_home_station_wind_chill', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.5', + }) +# --- diff --git a/tests/components/weatherflow_cloud/snapshots/test_weather.ambr b/tests/components/weatherflow_cloud/snapshots/test_weather.ambr new file mode 100644 index 00000000000..569b744529c --- /dev/null +++ b/tests/components/weatherflow_cloud/snapshots/test_weather.ambr @@ -0,0 +1,62 @@ +# serializer version: 1 +# name: test_weather[weather.my_home_station-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'weather', + 'entity_category': None, + 'entity_id': 'weather.my_home_station', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 
'weatherflow_cloud', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'weatherflow_forecast_24432', + 'unit_of_measurement': None, + }) +# --- +# name: test_weather[weather.my_home_station-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', + 'dew_point': -13.0, + 'friendly_name': 'My Home Station', + 'humidity': 27, + 'precipitation_unit': , + 'pressure': 795.8, + 'pressure_unit': , + 'supported_features': , + 'temperature': 4.0, + 'temperature_unit': , + 'uv_index': 2, + 'visibility_unit': , + 'wind_bearing': 40.0, + 'wind_gust_speed': 14.4, + 'wind_speed': 7.2, + 'wind_speed_unit': , + }), + 'context': , + 'entity_id': 'weather.my_home_station', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sunny', + }) +# --- diff --git a/tests/components/weatherflow_cloud/test_sensor.py b/tests/components/weatherflow_cloud/test_sensor.py new file mode 100644 index 00000000000..35ce098f5a7 --- /dev/null +++ b/tests/components/weatherflow_cloud/test_sensor.py @@ -0,0 +1,29 @@ +"""Tests for the WeatherFlow Cloud sensor platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/weatherflow_cloud/test_weather.py b/tests/components/weatherflow_cloud/test_weather.py new file mode 100644 index 00000000000..04da96df423 --- /dev/null +++ b/tests/components/weatherflow_cloud/test_weather.py @@ -0,0 +1,29 @@ +"""Tests for the WeatherFlow Cloud weather platform.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_weather( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.WEATHER] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/weatherkit/conftest.py b/tests/components/weatherkit/conftest.py index d4b849115f6..14d96d28347 100644 --- a/tests/components/weatherkit/conftest.py +++ b/tests/components/weatherkit/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Apple WeatherKit tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/webhook/test_trigger.py b/tests/components/webhook/test_trigger.py index 37aae47dd14..2963db70ad4 100644 --- a/tests/components/webhook/test_trigger.py +++ b/tests/components/webhook/test_trigger.py @@ -17,7 +17,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: @pytest.fixture(autouse=True) -async def setup_http(hass): +async def setup_http(hass: HomeAssistant) -> None: """Set up http.""" assert await async_setup_component(hass, "http", {}) assert await async_setup_component(hass, "webhook", {}) diff --git a/tests/components/webmin/conftest.py b/tests/components/webmin/conftest.py index c3ad43510d5..ae0d7b26b5a 100644 --- a/tests/components/webmin/conftest.py +++ b/tests/components/webmin/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Webmin integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components.webmin.const import DEFAULT_PORT, DOMAIN from homeassistant.const import ( @@ -37,14 +37,21 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup -async def async_init_integration(hass: HomeAssistant) -> MockConfigEntry: +async def async_init_integration( + hass: HomeAssistant, with_mac_address: bool = True +) -> MockConfigEntry: """Set up the Webmin integration in Home Assistant.""" entry = MockConfigEntry(domain=DOMAIN, options=TEST_USER_INPUT, title="name") entry.add_to_hass(hass) with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture("webmin_update.json", DOMAIN), + return_value=load_json_object_fixture( + "webmin_update.json" + if with_mac_address + else "webmin_update_without_mac.json", + DOMAIN, + ), ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/webmin/fixtures/webmin_update_without_mac.json b/tests/components/webmin/fixtures/webmin_update_without_mac.json new file mode 100644 index 00000000000..e79c54d0ff2 --- /dev/null +++ b/tests/components/webmin/fixtures/webmin_update_without_mac.json @@ -0,0 +1,108 @@ +{ + "disk_total": 18104905818112, + "io": [0, 4], + "load": [ + 1.29, + 1.36, + 1.37, + 3589, + "Intel(R) Core(TM) i7-5820K CPU @ 3.30GHz", + "GenuineIntel", + 15728640, + 12 + ], + "disk_free": 7749321486336, + "kernel": { "os": "Linux", "arch": "x86_64", "version": "6.6.18-1-lts" }, + "disk_fs": [ + { + "device": "UUID=00000000-80b6-0000-8a06-000000000000", + "dir": "/", + 
"ifree": 14927206, + "total": 248431161344, + "used_percent": 80, + "type": "ext4", + "itotal": 15482880, + "iused": 555674, + "free": 49060442112, + "used": 186676502528, + "iused_percent": 4 + }, + { + "total": 11903838912512, + "used_percent": 38, + "iused": 3542318, + "type": "ext4", + "itotal": 366198784, + "device": "/dev/md127", + "ifree": 362656466, + "dir": "/media/disk2", + "iused_percent": 1, + "free": 7028764823552, + "used": 4275077644288 + }, + { + "dir": "/media/disk1", + "ifree": 183130757, + "device": "UUID=00000000-2bb2-0000-896c-000000000000", + "type": "ext4", + "itotal": 183140352, + "iused": 9595, + "used_percent": 89, + "total": 5952635744256, + "used": 4981066997760, + "free": 671496220672, + "iused_percent": 1 + } + ], + "drivetemps": [ + { "temp": 49, "device": "/dev/sda", "failed": "", "errors": "" }, + { "failed": "", "errors": "", "device": "/dev/sdb", "temp": 49 }, + { "device": "/dev/sdc", "temp": 51, "failed": "", "errors": "" }, + { "failed": "", "errors": "", "device": "/dev/sdd", "temp": 51 }, + { "errors": "", "failed": "", "temp": 43, "device": "/dev/sde" }, + { "device": "/dev/sdf", "temp": 40, "errors": "", "failed": "" } + ], + "mem": [32766344, 28530480, 1953088, 1944384, 27845756, ""], + "disk_used": 9442821144576, + "cputemps": [ + { "temp": 51, "core": 0 }, + { "temp": 49, "core": 1 }, + { "core": 2, "temp": 59 }, + { "temp": 51, "core": 3 }, + { "temp": 50, "core": 4 }, + { "temp": 49, "core": 5 } + ], + "procs": 310, + "cpu": [0, 8, 92, 0, 0], + "cpufans": [ + { "rpm": 0, "fan": 1 }, + { "fan": 2, "rpm": 1371 }, + { "rpm": 0, "fan": 3 }, + { "rpm": 927, "fan": 4 }, + { "rpm": 801, "fan": 5 } + ], + "load_1m": 1.29, + "load_5m": 1.36, + "load_15m": 1.37, + "mem_total": 32766344, + "mem_free": 28530480, + "swap_total": 1953088, + "swap_free": 1944384, + "uptime": { "days": 11, "minutes": 1, "seconds": 28 }, + "active_interfaces": [ + { + "scope6": ["host"], + "address": "127.0.0.1", + "address6": ["::1"], + "name": "lo", + "broadcast": 0, + "up": 1, + "index": 0, + "fullname": "lo", + "netmask6": [128], + "netmask": "255.0.0.0", + "mtu": 65536, + "edit": 1 + } + ] +} diff --git a/tests/components/webmin/test_config_flow.py b/tests/components/webmin/test_config_flow.py index a9f5eafc5c7..477ad230622 100644 --- a/tests/components/webmin/test_config_flow.py +++ b/tests/components/webmin/test_config_flow.py @@ -33,15 +33,16 @@ async def user_flow(hass: HomeAssistant) -> str: return result["flow_id"] +@pytest.mark.parametrize( + "fixture", ["webmin_update_without_mac.json", "webmin_update.json"] +) async def test_form_user( - hass: HomeAssistant, - user_flow: str, - mock_setup_entry: AsyncMock, + hass: HomeAssistant, user_flow: str, mock_setup_entry: AsyncMock, fixture: str ) -> None: """Test a successful user initiated flow.""" with patch( "homeassistant.components.webmin.helpers.WebminInstance.update", - return_value=load_json_object_fixture("webmin_update.json", DOMAIN), + return_value=load_json_object_fixture(fixture, DOMAIN), ): result = await hass.config_entries.flow.async_configure( user_flow, TEST_USER_INPUT diff --git a/tests/components/webmin/test_diagnostics.py b/tests/components/webmin/test_diagnostics.py index 5f1df44f4a8..98d6544bc76 100644 --- a/tests/components/webmin/test_diagnostics.py +++ b/tests/components/webmin/test_diagnostics.py @@ -1,6 +1,7 @@ """Tests for the diagnostics data provided by the Webmin integration.""" from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from homeassistant.core import 
HomeAssistant @@ -16,9 +17,6 @@ async def test_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - assert ( - await get_diagnostics_for_config_entry( - hass, hass_client, await async_init_integration(hass) - ) - == snapshot - ) + assert await get_diagnostics_for_config_entry( + hass, hass_client, await async_init_integration(hass) + ) == snapshot(exclude=props("created_at", "modified_at")) diff --git a/tests/components/webmin/test_init.py b/tests/components/webmin/test_init.py index 7b6282edfae..36894f00d5f 100644 --- a/tests/components/webmin/test_init.py +++ b/tests/components/webmin/test_init.py @@ -19,3 +19,11 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) + + +async def test_entry_without_mac_address(hass: HomeAssistant) -> None: + """Test an entry without MAC address.""" + + entry = await async_init_integration(hass, False) + + assert entry.runtime_data.unique_id == entry.entry_id diff --git a/tests/components/webostv/conftest.py b/tests/components/webostv/conftest.py index 2b5d701f899..a30ae933cca 100644 --- a/tests/components/webostv/conftest.py +++ b/tests/components/webostv/conftest.py @@ -1,17 +1,14 @@ """Common fixtures and objects for the LG webOS integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.webostv.const import LIVE_TV_APP_ID -from homeassistant.core import HomeAssistant, ServiceCall from .const import CHANNEL_1, CHANNEL_2, CLIENT_KEY, FAKE_UUID, MOCK_APPS, MOCK_INPUTS -from tests.common import async_mock_service - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -22,12 +19,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(name="client") def client_fixture(): """Patch of client library for tests.""" diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py index afda36d913f..406bb9c8804 100644 --- a/tests/components/webostv/test_config_flow.py +++ b/tests/components/webostv/test_config_flow.py @@ -295,7 +295,9 @@ async def test_form_abort_uuid_configured(hass: HomeAssistant, client) -> None: assert entry.data[CONF_HOST] == "new_host" -async def test_reauth_successful(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_successful( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test that the reauthorization is successful.""" entry = await setup_webostv(hass) assert client @@ -331,7 +333,7 @@ async def test_reauth_successful(hass: HomeAssistant, client, monkeypatch) -> No ], ) async def test_reauth_errors( - hass: HomeAssistant, client, monkeypatch, side_effect, reason + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch, side_effect, reason ) -> None: """Test reauthorization errors.""" entry = await setup_webostv(hass) diff --git a/tests/components/webostv/test_device_trigger.py b/tests/components/webostv/test_device_trigger.py index 29c75d4440b..41045969335 100644 --- a/tests/components/webostv/test_device_trigger.py +++ b/tests/components/webostv/test_device_trigger.py @@ -44,7 +44,7 @@ async def test_get_triggers( async def test_if_fires_on_turn_on_request( hass: 
HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, client, ) -> None: @@ -97,11 +97,11 @@ async def test_if_fires_on_turn_on_request( blocking=True, ) - assert len(calls) == 2 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 - assert calls[1].data["some"] == ENTITY_ID - assert calls[1].data["id"] == 0 + assert len(service_calls) == 3 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 + assert service_calls[2].data["some"] == ENTITY_ID + assert service_calls[2].data["id"] == 0 async def test_failure_scenarios( diff --git a/tests/components/webostv/test_diagnostics.py b/tests/components/webostv/test_diagnostics.py index 934b59a7b83..e2fbc43e187 100644 --- a/tests/components/webostv/test_diagnostics.py +++ b/tests/components/webostv/test_diagnostics.py @@ -58,5 +58,7 @@ async def test_diagnostics( "source": "user", "unique_id": REDACTED, "disabled_by": None, + "created_at": entry.created_at.isoformat(), + "modified_at": entry.modified_at.isoformat(), }, } diff --git a/tests/components/webostv/test_init.py b/tests/components/webostv/test_init.py index a2961a81a4e..e2638c86f5e 100644 --- a/tests/components/webostv/test_init.py +++ b/tests/components/webostv/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import Mock from aiowebostv import WebOsTvPairError +import pytest from homeassistant.components.webostv.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState @@ -12,7 +13,9 @@ from homeassistant.core import HomeAssistant from . import setup_webostv -async def test_reauth_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_setup_entry( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test reauth flow triggered by setup entry.""" monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) monkeypatch.setattr(client, "connect", Mock(side_effect=WebOsTvPairError)) @@ -32,7 +35,9 @@ async def test_reauth_setup_entry(hass: HomeAssistant, client, monkeypatch) -> N assert flow["context"].get("entry_id") == entry.entry_id -async def test_key_update_setup_entry(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_key_update_setup_entry( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test key update from setup entry.""" monkeypatch.setattr(client, "client_key", "new_key") entry = await setup_webostv(hass) diff --git a/tests/components/webostv/test_media_player.py b/tests/components/webostv/test_media_player.py index f0d17057b33..e4c02e680bd 100644 --- a/tests/components/webostv/test_media_player.py +++ b/tests/components/webostv/test_media_player.py @@ -144,7 +144,7 @@ async def test_media_play_pause(hass: HomeAssistant, client) -> None: ], ) async def test_media_next_previous_track( - hass: HomeAssistant, client, service, client_call, monkeypatch + hass: HomeAssistant, client, service, client_call, monkeypatch: pytest.MonkeyPatch ) -> None: """Test media next/previous track services.""" await setup_webostv(hass) @@ -270,7 +270,10 @@ async def test_select_sound_output(hass: HomeAssistant, client) -> None: async def test_device_info_startup_off( - hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + client, + monkeypatch: pytest.MonkeyPatch, + device_registry: dr.DeviceRegistry, ) -> None: """Test device info when device is off at startup.""" monkeypatch.setattr(client, 
"system_info", None) @@ -291,7 +294,10 @@ async def test_device_info_startup_off( async def test_entity_attributes( - hass: HomeAssistant, client, monkeypatch, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + client, + monkeypatch: pytest.MonkeyPatch, + device_registry: dr.DeviceRegistry, ) -> None: """Test entity attributes.""" entry = await setup_webostv(hass) @@ -383,7 +389,7 @@ async def test_play_media(hass: HomeAssistant, client, media_id, ch_id) -> None: async def test_update_sources_live_tv_find( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test finding live TV app id in update sources.""" await setup_webostv(hass) @@ -466,7 +472,9 @@ async def test_update_sources_live_tv_find( assert len(sources) == 1 -async def test_client_disconnected(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_client_disconnected( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test error not raised when client is disconnected.""" await setup_webostv(hass) monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) @@ -477,7 +485,10 @@ async def test_client_disconnected(hass: HomeAssistant, client, monkeypatch) -> async def test_control_error_handling( - hass: HomeAssistant, client, caplog: pytest.LogCaptureFixture, monkeypatch + hass: HomeAssistant, + client, + caplog: pytest.LogCaptureFixture, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test control errors handling.""" await setup_webostv(hass) @@ -507,7 +518,9 @@ async def test_control_error_handling( ) -async def test_supported_features(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_supported_features( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test test supported features.""" monkeypatch.setattr(client, "sound_output", "lineout") await setup_webostv(hass) @@ -565,7 +578,7 @@ async def test_supported_features(hass: HomeAssistant, client, monkeypatch) -> N async def test_cached_supported_features( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test test supported features.""" monkeypatch.setattr(client, "is_on", False) @@ -672,7 +685,7 @@ async def test_cached_supported_features( async def test_supported_features_no_cache( - hass: HomeAssistant, client, monkeypatch + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch ) -> None: """Test supported features if device is off and no cache.""" monkeypatch.setattr(client, "is_on", False) @@ -716,7 +729,7 @@ async def test_get_image_http( client, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http.""" url = "http://something/valid_icon" @@ -742,7 +755,7 @@ async def test_get_image_http_error( hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, caplog: pytest.LogCaptureFixture, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http error.""" url = "http://something/icon_error" @@ -769,7 +782,7 @@ async def test_get_image_https( client, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test get image via http.""" url = "https://something/valid_icon_https" @@ -789,7 +802,9 @@ async def test_get_image_https( assert content == b"https_image" -async def 
test_reauth_reconnect(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_reauth_reconnect( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test reauth flow triggered by reconnect.""" entry = await setup_webostv(hass) monkeypatch.setattr(client, "is_connected", Mock(return_value=False)) @@ -814,7 +829,9 @@ async def test_reauth_reconnect(hass: HomeAssistant, client, monkeypatch) -> Non assert flow["context"].get("entry_id") == entry.entry_id -async def test_update_media_state(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_update_media_state( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test updating media state.""" await setup_webostv(hass) diff --git a/tests/components/webostv/test_notify.py b/tests/components/webostv/test_notify.py index a1c37b9bf97..75c2e148310 100644 --- a/tests/components/webostv/test_notify.py +++ b/tests/components/webostv/test_notify.py @@ -72,7 +72,9 @@ async def test_notify(hass: HomeAssistant, client) -> None: ) -async def test_notify_not_connected(hass: HomeAssistant, client, monkeypatch) -> None: +async def test_notify_not_connected( + hass: HomeAssistant, client, monkeypatch: pytest.MonkeyPatch +) -> None: """Test sending a message when client is not connected.""" await setup_webostv(hass) assert hass.services.has_service(NOTIFY_DOMAIN, TV_NAME) @@ -95,7 +97,10 @@ async def test_notify_not_connected(hass: HomeAssistant, client, monkeypatch) -> async def test_icon_not_found( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, monkeypatch + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + client, + monkeypatch: pytest.MonkeyPatch, ) -> None: """Test notify icon not found error.""" await setup_webostv(hass) @@ -130,7 +135,7 @@ async def test_connection_errors( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, client, - monkeypatch, + monkeypatch: pytest.MonkeyPatch, side_effect, error, ) -> None: diff --git a/tests/components/webostv/test_trigger.py b/tests/components/webostv/test_trigger.py index 918666cf4bf..d7eeae28ea3 100644 --- a/tests/components/webostv/test_trigger.py +++ b/tests/components/webostv/test_trigger.py @@ -20,7 +20,7 @@ from tests.common import MockEntity, MockEntityPlatform async def test_webostv_turn_on_trigger_device_id( hass: HomeAssistant, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, client, ) -> None: @@ -58,14 +58,14 @@ async def test_webostv_turn_on_trigger_device_id( blocking=True, ) - assert len(calls) == 1 - assert calls[0].data["some"] == device.id - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == device.id + assert service_calls[1].data["id"] == 0 with patch("homeassistant.config.load_yaml_dict", return_value={}): await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True) - calls.clear() + service_calls.clear() with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -75,11 +75,11 @@ async def test_webostv_turn_on_trigger_device_id( blocking=True, ) - assert len(calls) == 0 + assert len(service_calls) == 1 async def test_webostv_turn_on_trigger_entity_id( - hass: HomeAssistant, calls: list[ServiceCall], client + hass: HomeAssistant, service_calls: list[ServiceCall], client ) -> None: """Test for turn_on triggers by entity_id firing.""" await setup_webostv(hass) @@ -113,9 +113,9 @@ async def test_webostv_turn_on_trigger_entity_id( blocking=True, ) - assert 
len(calls) == 1 - assert calls[0].data["some"] == ENTITY_ID - assert calls[0].data["id"] == 0 + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == ENTITY_ID + assert service_calls[1].data["id"] == 0 async def test_wrong_trigger_platform_type( diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 276a383d9e9..10a9c4876b9 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -24,6 +24,7 @@ from homeassistant.core import Context, HomeAssistant, State, SupportsResponse, from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.event import async_track_state_change_event from homeassistant.loader import async_get_integration from homeassistant.setup import async_setup_component from homeassistant.util.json import json_loads @@ -2814,3 +2815,54 @@ async def test_integration_descriptions( assert response["success"] assert response["result"] + + +async def test_subscribe_entities_chained_state_change( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + hass_admin_user: MockUser, +) -> None: + """Test chaining state changed events. + + Ensure the websocket sends the off state after + the on state. + """ + + @callback + def auto_off_listener(event): + hass.states.async_set("light.permitted", "off") + + async_track_state_change_event(hass, ["light.permitted"], auto_off_listener) + + await websocket_client.send_json({"id": 7, "type": "subscribe_entities"}) + + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] + + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == {"a": {}} + + hass.states.async_set("light.permitted", "on") + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "a": {"light.permitted": {"a": {}, "c": ANY, "lc": ANY, "s": "on"}} + } + data = await websocket_client.receive_str() + msg = json_loads(data) + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "c": {"light.permitted": {"+": {"c": ANY, "lc": ANY, "s": "off"}}} + } + + await websocket_client.close() + await hass.async_block_till_done() diff --git a/tests/components/websocket_api/test_connection.py b/tests/components/websocket_api/test_connection.py index d6c2765522e..343575e5b4a 100644 --- a/tests/components/websocket_api/test_connection.py +++ b/tests/components/websocket_api/test_connection.py @@ -2,7 +2,7 @@ import logging from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch from aiohttp.test_utils import make_mocked_request import pytest @@ -75,16 +75,17 @@ async def test_exception_handling( send_messages = [] user = MockUser() refresh_token = Mock() - current_request = AsyncMock() hass.data[DOMAIN] = {} - def get_extra_info(key: str) -> Any: + def get_extra_info(key: str) -> Any | None: if key == "sslcontext": return True if key == "peername": return ("127.0.0.42", 8123) + return None + mocked_transport = Mock() mocked_transport.get_extra_info = get_extra_info mocked_request = make_mocked_request( 
diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 794dd410661..11665da11b4 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -5,7 +5,7 @@ from datetime import timedelta from typing import Any, cast from unittest.mock import patch -from aiohttp import ServerDisconnectedError, WSMsgType, web +from aiohttp import WSMsgType, WSServerHandshakeError, web import pytest from homeassistant.components.websocket_api import ( @@ -374,7 +374,7 @@ async def test_prepare_fail( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", side_effect=(TimeoutError, web.WebSocketResponse.prepare), ), - pytest.raises(ServerDisconnectedError), + pytest.raises(WSServerHandshakeError), ): await hass_ws_client(hass) diff --git a/tests/components/websocket_api/test_sensor.py b/tests/components/websocket_api/test_sensor.py index 3af02dc8f2b..2e5f0c6c605 100644 --- a/tests/components/websocket_api/test_sensor.py +++ b/tests/components/websocket_api/test_sensor.py @@ -1,10 +1,10 @@ """Test cases for the API stream sensor.""" from homeassistant.auth.providers.homeassistant import HassAuthProvider -from homeassistant.bootstrap import async_setup_component from homeassistant.components.websocket_api.auth import TYPE_AUTH_REQUIRED from homeassistant.components.websocket_api.http import URL from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from .test_auth import test_auth_active_with_token diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index a5926f55a94..50620b20b8b 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -145,6 +145,8 @@ def side_effect_function(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None + def get_sensor_mock(said): """Get a mock of a sensor.""" diff --git a/tests/components/whirlpool/test_climate.py b/tests/components/whirlpool/test_climate.py index 18016bd9c67..cdae28f4432 100644 --- a/tests/components/whirlpool/test_climate.py +++ b/tests/components/whirlpool/test_climate.py @@ -264,10 +264,10 @@ async def test_service_calls( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 16}, blocking=True, ) - mock_instance.set_temp.assert_called_once_with(15) + mock_instance.set_temp.assert_called_once_with(16) mock_instance.set_mode.reset_mock() await hass.services.async_call( diff --git a/tests/components/whirlpool/test_diagnostics.py b/tests/components/whirlpool/test_diagnostics.py index 6cfc1b76e38..2a0b2e6fd18 100644 --- a/tests/components/whirlpool/test_diagnostics.py +++ b/tests/components/whirlpool/test_diagnostics.py @@ -29,4 +29,4 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry(hass, hass_client, mock_entry) - assert result == snapshot(exclude=props("entry_id")) + assert result == snapshot(exclude=props("entry_id", "created_at", "modified_at")) diff --git a/tests/components/whirlpool/test_sensor.py b/tests/components/whirlpool/test_sensor.py index 6af88c8a9f3..548025e29bd 100644 --- a/tests/components/whirlpool/test_sensor.py +++ b/tests/components/whirlpool/test_sensor.py @@ -42,6 +42,8 @@ def side_effect_function_open_door(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None 
+ async def test_dryer_sensor_values( hass: HomeAssistant, diff --git a/tests/components/whois/conftest.py b/tests/components/whois/conftest.py index 5fe420abb92..1c779cce671 100644 --- a/tests/components/whois/conftest.py +++ b/tests/components/whois/conftest.py @@ -2,11 +2,11 @@ from __future__ import annotations +from collections.abc import Generator from datetime import datetime from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant.components.whois.const import DOMAIN from homeassistant.const import CONF_DOMAIN diff --git a/tests/components/whois/snapshots/test_sensor.ambr b/tests/components/whois/snapshots/test_sensor.ambr index 9bc125f204b..4310bc77ebf 100644 --- a/tests/components/whois/snapshots/test_sensor.ambr +++ b/tests/components/whois/snapshots/test_sensor.ambr @@ -67,6 +67,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -145,6 +146,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -227,6 +229,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -305,6 +308,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -383,6 +387,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -460,6 +465,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -537,6 +543,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -614,6 +621,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , @@ -691,6 +699,7 @@ }), 'manufacturer': None, 'model': None, + 'model_id': None, 'name': 'home-assistant.io', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/wiffi/conftest.py b/tests/components/wiffi/conftest.py index 5f16d676e81..2383906291f 100644 --- a/tests/components/wiffi/conftest.py +++ b/tests/components/wiffi/conftest.py @@ -1,9 +1,9 @@ """Configuration for Wiffi tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/wled/conftest.py b/tests/components/wled/conftest.py index 0d839fc8666..301729843a2 100644 --- a/tests/components/wled/conftest.py +++ b/tests/components/wled/conftest.py @@ -1,11 +1,11 @@ """Fixtures for WLED integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory import pytest -from typing_extensions import Generator -from wled import Device as WLEDDevice +from wled import Device as WLEDDevice, Releases from homeassistant.components.wled.const import DOMAIN from homeassistant.const import CONF_HOST @@ -51,7 +51,24 @@ def device_fixture() -> str: @pytest.fixture -def mock_wled(device_fixture: str) -> Generator[MagicMock]: +def mock_wled_releases() -> Generator[MagicMock]: + """Return a mocked WLEDReleases 
client.""" + with patch( + "homeassistant.components.wled.coordinator.WLEDReleases", autospec=True + ) as wled_releases_mock: + wled_releases = wled_releases_mock.return_value + wled_releases.releases.return_value = Releases( + beta="1.0.0b5", + stable="0.99.0", + ) + + yield wled_releases + + +@pytest.fixture +def mock_wled( + device_fixture: str, mock_wled_releases: MagicMock +) -> Generator[MagicMock]: """Return a mocked WLED client.""" with ( patch( @@ -60,11 +77,12 @@ def mock_wled(device_fixture: str) -> Generator[MagicMock]: patch("homeassistant.components.wled.config_flow.WLED", new=wled_mock), ): wled = wled_mock.return_value - wled.update.return_value = WLEDDevice( + wled.update.return_value = WLEDDevice.from_dict( load_json_object_fixture(f"{device_fixture}.json", DOMAIN) ) wled.connected = False wled.host = "127.0.0.1" + yield wled diff --git a/tests/components/wled/fixtures/cct.json b/tests/components/wled/fixtures/cct.json new file mode 100644 index 00000000000..da36f8a5f69 --- /dev/null +++ b/tests/components/wled/fixtures/cct.json @@ -0,0 +1,383 @@ +{ + "state": { + "on": true, + "bri": 255, + "transition": 7, + "ps": 2, + "pl": -1, + "nl": { + "on": false, + "dur": 60, + "mode": 1, + "tbri": 0, + "rem": -1 + }, + "udpn": { + "send": false, + "recv": true, + "sgrp": 1, + "rgrp": 1 + }, + "lor": 0, + "mainseg": 0, + "seg": [ + { + "id": 0, + "start": 0, + "stop": 178, + "len": 178, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 53, + "set": 0, + "col": [ + [0, 0, 0, 255], + [0, 0, 0, 0], + [0, 0, 0, 0] + ], + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, + "sel": true, + "rev": false, + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + } + ] + }, + "info": { + "ver": "0.15.0-b3", + "vid": 2405180, + "cn": "Kōsen", + "release": "ESP32", + "leds": { + "count": 178, + "pwr": 0, + "fps": 0, + "maxpwr": 0, + "maxseg": 32, + "bootps": 1, + "seglc": [7], + "lc": 7, + "rgbw": true, + "wv": 2, + "cct": 4 + }, + "str": false, + "name": "WLED CCT light", + "udpport": 21324, + "simplifiedui": false, + "live": false, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": 1, + "fxcount": 187, + "palcount": 75, + "cpalcount": 4, + "maps": [ + { + "id": 0 + } + ], + "wifi": { + "bssid": "AA:AA:AA:AA:AA:BB", + "rssi": -44, + "signal": 100, + "channel": 11 + }, + "fs": { + "u": 20, + "t": 983, + "pmt": 1721752272 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "clock": 240, + "flash": 4, + "lwip": 0, + "freeheap": 164804, + "uptime": 79769, + "time": "2024-7-24, 14:34:00", + "opt": 79, + "brand": "WLED", + "product": "FOSS", + "mac": "aabbccddeeff", + "ip": "127.0.0.1" + }, + "effects": [ + "Solid", + "Blink", + "Breathe", + "Wipe", + "Wipe Random", + "Random Colors", + "Sweep", + "Dynamic", + "Colorloop", + "Rainbow", + "Scan", + "Scan Dual", + "Fade", + "Theater", + "Theater Rainbow", + "Running", + "Saw", + "Twinkle", + "Dissolve", + "Dissolve Rnd", + "Sparkle", + "Sparkle Dark", + "Sparkle+", + "Strobe", + "Strobe Rainbow", + "Strobe Mega", + "Blink Rainbow", + "Android", + "Chase", + "Chase Random", + "Chase Rainbow", + "Chase Flash", + "Chase Flash Rnd", + "Rainbow Runner", + "Colorful", + "Traffic Light", + "Sweep Random", + "Chase 2", + "Aurora", + "Stream", + "Scanner", + "Lighthouse", + "Fireworks", + "Rain", + "Tetrix", + "Fire Flicker", + "Gradient", + "Loading", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", 
+ "Chase 3", + "Tri Wipe", + "Tri Fade", + "Lightning", + "ICU", + "Multi Comet", + "Scanner Dual", + "Stream 2", + "Oscillate", + "Pride 2015", + "Juggle", + "Palette", + "Fire 2012", + "Colorwaves", + "Bpm", + "Fill Noise", + "Noise 1", + "Noise 2", + "Noise 3", + "Noise 4", + "Colortwinkles", + "Lake", + "Meteor", + "Meteor Smooth", + "Railway", + "Ripple", + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "Rotozoomer", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" + ], + "palettes": [ + "Default", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", + "Party", + "Cloud", + "Lava", + "Ocean", + "Forest", + "Rainbow", + "Rainbow Bands", + "Sunset", + "Rivendell", + "Breeze", + "Red & Blue", + "Yellowout", + "Analogous", + "Splash", + "Pastel", + "Sunset 2", + "Beach", + "Vintage", + "Departure", + "Landscape", + "Beech", + "Sherbet", + "Hult", + "Hult 64", + "Drywet", + "Jul", + "Grintage", + "Rewhi", + "Tertiary", + "Fire", + "Icefire", + "Cyane", + "Light Pink", + "Autumn", + "Magenta", + "Magred", + "Yelmag", + "Yelblu", + "Orange & Teal", + "Tiamat", + "April Night", + "Orangery", + "C9", + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" + ] +} diff --git a/tests/components/wled/fixtures/rgb.json b/tests/components/wled/fixtures/rgb.json index 21f9b005b72..50a82eb792e 100644 --- a/tests/components/wled/fixtures/rgb.json +++ b/tests/components/wled/fixtures/rgb.json @@ -1,28 +1,41 @@ { "state": { "on": true, - "bri": 127, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 1, "seg": [ { "id": 0, "start": 0, - "stop": 19, - 
"len": 20, + "stop": 15, + "len": 15, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 159, 0], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], @@ -30,62 +43,106 @@ "sx": 32, "ix": 128, "pal": 0, - "sel": true, + "c1": 128, + "c2": 128, + "c3": 16, + "sel": false, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 }, { "id": 1, - "start": 20, + "start": 15, "stop": 30, - "len": 10, + "len": 15, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [0, 255, 123], + [255, 170, 0], [0, 0, 0], [0, 0, 0] ], - "fx": 1, + "fx": 3, "sx": 16, "ix": 64, "pal": 1, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": true, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.5", - "version_latest_stable": "0.12.0", - "version_latest_beta": "0.13.0b1", - "vid": 1909122, + "ver": "0.14.4", + "vid": 2405180, "leds": { "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, + "pwr": 515, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [1, 1], + "lc": 1, + "rgbw": false, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "fxcount": 81, - "palcount": 50, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -43, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 1718827787 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 198384, + "uptime": 966, + "time": "2024-6-19, 20:10:38", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", + "product": "FOSS", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -101,21 +158,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -127,30 +184,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + "Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -158,27 +215,133 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + "Bpm", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkle", + "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", - "Twinklefox" + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + 
"Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", "Lava", @@ -195,11 +358,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -222,6 +385,27 @@ "April Night", "Orangery", "C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgb_no_update.json b/tests/components/wled/fixtures/rgb_no_update.json deleted file mode 100644 index c8aa902cc95..00000000000 --- a/tests/components/wled/fixtures/rgb_no_update.json +++ /dev/null @@ -1,227 +0,0 @@ -{ - "state": { - "on": true, - "bri": 127, - "transition": 7, - "ps": -1, - "pl": -1, - "nl": { - "on": false, - "dur": 60, - "fade": true, - "tbri": 0 - }, - "udpn": { - "send": false, - "recv": true - }, - "seg": [ - { - "id": 0, - "start": 0, - "stop": 19, - "len": 20, - "col": [ - [255, 159, 0], - [0, 0, 0], - [0, 0, 0] - ], - "fx": 0, - "sx": 32, - "ix": 128, - "pal": 0, - "sel": true, - "rev": false, - "cln": -1 - }, - { - "id": 1, - "start": 20, - "stop": 30, - "len": 10, - "col": [ - [0, 255, 123], - [0, 0, 0], - [0, 0, 0] - ], - "fx": 1, - "sx": 16, - "ix": 64, - "pal": 1, - "sel": true, - "rev": true, - "cln": -1 - } - ] - }, - "info": { - "ver": null, - "version_latest_stable": null, - "version_latest_beta": null, - "vid": 1909122, - "leds": { - "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, - "maxpwr": 850, - "maxseg": 10 - }, - "name": "WLED RGB Light", - "udpport": 21324, - "live": false, - "fxcount": 81, - "palcount": 50, - "wifi": { - "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, - "channel": 11 - }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, - 
"brand": "WLED", - "product": "DIY light", - "btype": "bin", - "mac": "aabbccddeeff", - "ip": "127.0.0.1" - }, - "effects": [ - "Solid", - "Blink", - "Breathe", - "Wipe", - "Wipe Random", - "Random Colors", - "Sweep", - "Dynamic", - "Colorloop", - "Rainbow", - "Scan", - "Dual Scan", - "Fade", - "Chase", - "Chase Rainbow", - "Running", - "Saw", - "Twinkle", - "Dissolve", - "Dissolve Rnd", - "Sparkle", - "Dark Sparkle", - "Sparkle+", - "Strobe", - "Strobe Rainbow", - "Mega Strobe", - "Blink Rainbow", - "Android", - "Chase", - "Chase Random", - "Chase Rainbow", - "Chase Flash", - "Chase Flash Rnd", - "Rainbow Runner", - "Colorful", - "Traffic Light", - "Sweep Random", - "Running 2", - "Red & Blue", - "Stream", - "Scanner", - "Lighthouse", - "Fireworks", - "Rain", - "Merry Christmas", - "Fire Flicker", - "Gradient", - "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", - "Tri Wipe", - "Tri Fade", - "Lightning", - "ICU", - "Multi Comet", - "Dual Scanner", - "Stream 2", - "Oscillate", - "Pride 2015", - "Juggle", - "Palette", - "Fire 2012", - "Colorwaves", - "BPM", - "Fill Noise", - "Noise 1", - "Noise 2", - "Noise 3", - "Noise 4", - "Colortwinkle", - "Lake", - "Meteor", - "Smooth Meteor", - "Railway", - "Ripple", - "Twinklefox" - ], - "palettes": [ - "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", - "Party", - "Cloud", - "Lava", - "Ocean", - "Forest", - "Rainbow", - "Rainbow Bands", - "Sunset", - "Rivendell", - "Breeze", - "Red & Blue", - "Yellowout", - "Analogous", - "Splash", - "Pastel", - "Sunset 2", - "Beech", - "Vintage", - "Departure", - "Landscape", - "Beach", - "Sherbet", - "Hult", - "Hult 64", - "Drywet", - "Jul", - "Grintage", - "Rewhi", - "Tertiary", - "Fire", - "Icefire", - "Cyane", - "Light Pink", - "Autumn", - "Magenta", - "Magred", - "Yelmag", - "Yelblu", - "Orange & Teal", - "Tiamat", - "April Night", - "Orangery", - "C9", - "Sakura" - ] -} diff --git a/tests/components/wled/fixtures/rgb_single_segment.json b/tests/components/wled/fixtures/rgb_single_segment.json index aa0b79e98f5..512ac2a00df 100644 --- a/tests/components/wled/fixtures/rgb_single_segment.json +++ b/tests/components/wled/fixtures/rgb_single_segment.json @@ -1,28 +1,41 @@ { "state": { "on": true, - "bri": 127, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 0, "seg": [ { "id": 0, "start": 0, "stop": 30, - "len": 20, + "len": 30, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 159, 0], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], @@ -30,44 +43,72 @@ "sx": 32, "ix": 128, "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.6b1", - "version_latest_stable": "0.8.5", - "version_latest_beta": "0.8.6b2", - "vid": 1909122, + "ver": "1.0.0b4", + "vid": 2405180, "leds": { "count": 30, - "rgbw": false, - "pin": [2], - "pwr": 470, + "pwr": 536, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [1], + "lc": 1, + "rgbw": false, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGB Light", "udpport": 21324, "live": false, - "fxcount": 81, - "palcount": 50, + "liveseg": -1, + 
"lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -44, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_4_2", - "freeheap": 14600, - "uptime": 32, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 0 + }, + "ndc": 1, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", + "product": "FOSS", "mac": "aabbccddeeff", "ip": "127.0.0.1" }, @@ -83,21 +124,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -109,30 +150,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + "Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -140,27 +181,133 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + "Bpm", "Fill Noise", "Noise 1", "Noise 2", "Noise 3", "Noise 4", - "Colortwinkle", + "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", - "Twinklefox" + "Twinklefox", + "Twinklecat", + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on 
Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", "Lava", @@ -177,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -204,6 +351,27 @@ "April Night", "Orangery", "C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgb_websocket.json b/tests/components/wled/fixtures/rgb_websocket.json index 4a0ed7b1ee5..f5a3e715654 100644 --- a/tests/components/wled/fixtures/rgb_websocket.json +++ b/tests/components/wled/fixtures/rgb_websocket.json @@ -1,26 +1,22 @@ { "state": { "on": true, - "bri": 255, + "bri": 128, "transition": 7, "ps": -1, "pl": -1, - "ccnf": { - "min": 1, - "max": 5, - "time": 12 - }, "nl": { "on": false, "dur": 60, - "fade": true, "mode": 1, "tbri": 0, "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, "lor": 0, "mainseg": 0, @@ -28,70 +24,89 @@ { "id": 0, "start": 0, - "stop": 13, - "len": 13, + "stop": 30, + "len": 30, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, "col": [ - [255, 181, 218], + [127, 172, 255], [0, 0, 0], [0, 0, 0] ], "fx": 0, - "sx": 43, + "sx": 128, "ix": 128, - "pal": 2, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.12.0-b2", - "version_latest_stable": "0.11.0", - "version_latest_beta": "0.12.0-b2", - "vid": 2103220, + "ver": "0.99.0", + "vid": 2405180, "leds": { - "count": 13, + "count": 30, + "pwr": 536, + "fps": 5, + "maxpwr": 850, + "maxseg": 32, + "seglc": [1], + "lc": 1, "rgbw": false, - "wv": false, - "pin": [2], - "pwr": 266, - "fps": 2, - "maxpwr": 1000, - "maxseg": 12, - "seglock": false + "wv": 0, + "cct": 0 }, "str": false, "name": "WLED WebSocket", "udpport": 21324, "live": false, + "liveseg": -1, "lm": "", "lip": "", "ws": 0, - "fxcount": 118, - "palcount": 56, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -68, - "signal": 64, - "channel": 6 + "rssi": -44, + "signal": 100, + "channel": 11 }, "fs": { - "u": 40, - "t": 1024, - "pmt": 1623156685 + "u": 12, + "t": 983, + "pmt": 0 }, "ndc": 1, - "arch": "esp8266", - "core": "2_7_4_7", - "lwip": 1, - "freeheap": 22752, - "uptime": 258411, - "opt": 127, + "arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", "product": "FOSS", "mac": "aabbccddeeff", @@ -135,7 +150,7 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", + "Chase 2", "Aurora", "Stream", "Scanner", @@ -146,13 +161,13 @@ "Fire Flicker", "Gradient", "Loading", - "Police", - "Police All", + "Rolling Balls", + "Fairy", "Two Dots", - "Two Areas", - "Circus", - "Halloween", - "Tri Chase", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", @@ -212,10 +227,79 @@ "Chunchun", "Dancing Shadows", "Washing Machine", - 
"Candy Cane", + "RSVD", "Blends", "TV Simulator", - "Dynamic Smooth" + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", @@ -240,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -273,6 +357,21 @@ "C9 2", "C9 New", "Temperature", - "Aurora 2" + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ] } diff --git a/tests/components/wled/fixtures/rgbw.json b/tests/components/wled/fixtures/rgbw.json index 100b3936900..285842605ae 100644 --- a/tests/components/wled/fixtures/rgbw.json +++ b/tests/components/wled/fixtures/rgbw.json @@ -1,74 +1,115 @@ { "state": { "on": true, - "bri": 140, + "bri": 128, "transition": 7, - "ps": 1, - "pl": 3, + "ps": -1, + "pl": -1, "nl": { "on": false, "dur": 60, - "fade": true, - "tbri": 0 + "mode": 1, + "tbri": 0, + "rem": -1 }, "udpn": { "send": false, - "recv": true + "recv": true, + "sgrp": 1, + "rgrp": 1 }, + "lor": 0, + "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, - "len": 13, + "stop": 30, + "len": 30, + "grp": 1, + "spc": 0, + "of": 0, + "on": true, + "frz": false, + "bri": 255, + "cct": 127, + "set": 0, "col": [ [255, 0, 0, 139], [0, 0, 0, 0], [0, 0, 0, 0] ], - "fx": 9, - "sx": 165, + "fx": 0, + "sx": 128, "ix": 128, "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "cln": -1 + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 } ] }, "info": { - "ver": "0.8.6b4", - "version_latest_stable": "0.8.6", - "version_latest_beta": "0.8.6b5", - "vid": 1910255, + "ver": "0.99.0b1", + "vid": 2405180, "leds": { - "count": 13, - "rgbw": true, - "pin": [2], - "pwr": 208, + "count": 30, + "pwr": 536, + "fps": 5, "maxpwr": 850, - "maxseg": 10 + "maxseg": 32, + "seglc": [3], + "lc": 3, + "rgbw": true, + "wv": 0, + "cct": 0 }, + "str": false, "name": "WLED RGBW Light", "udpport": 21324, "live": false, - "fxcount": 83, - "palcount": 50, + "liveseg": -1, + "lm": "", + "lip": "", + "ws": -1, + "fxcount": 187, + "palcount": 71, + "cpalcount": 0, + "maps": [ + { + "id": 0 + } + ], "wifi": { "bssid": "AA:AA:AA:AA:AA:BB", - "rssi": -62, - "signal": 76, + "rssi": -44, + "signal": 100, "channel": 11 }, - "arch": "esp8266", - "core": "2_5_2", - "freeheap": 20136, - "uptime": 5591, - "opt": 119, + "fs": { + "u": 12, + "t": 983, + "pmt": 0 + }, + "ndc": 1, + 
"arch": "esp32", + "core": "v3.3.6-16-gcc5440f6a2", + "lwip": 0, + "freeheap": 196960, + "uptime": 461, + "time": "1970-1-1, 00:07:41", + "opt": 79, "brand": "WLED", - "product": "DIY light", - "btype": "bin", - "mac": "aabbccddee11", + "product": "FOSS", + "mac": "aabbccddeeff", "ip": "127.0.0.1" }, "effects": [ @@ -83,21 +124,21 @@ "Colorloop", "Rainbow", "Scan", - "Dual Scan", + "Scan Dual", "Fade", - "Chase", - "Chase Rainbow", + "Theater", + "Theater Rainbow", "Running", "Saw", "Twinkle", "Dissolve", "Dissolve Rnd", "Sparkle", - "Dark Sparkle", + "Sparkle Dark", "Sparkle+", "Strobe", "Strobe Rainbow", - "Mega Strobe", + "Strobe Mega", "Blink Rainbow", "Android", "Chase", @@ -109,30 +150,30 @@ "Colorful", "Traffic Light", "Sweep Random", - "Running 2", - "Red & Blue", + "Chase 2", + "Aurora", "Stream", "Scanner", "Lighthouse", "Fireworks", "Rain", - "Merry Christmas", + "Tetrix", "Fire Flicker", "Gradient", "Loading", - "In Out", - "In In", - "Out Out", - "Out In", - "Circus", - "Halloween", - "Tri Chase", + "Rolling Balls", + "Fairy", + "Two Dots", + "Fairytwinkle", + "Running Dual", + "RSVD", + "Chase 3", "Tri Wipe", "Tri Fade", "Lightning", "ICU", "Multi Comet", - "Dual Scanner", + "Scanner Dual", "Stream 2", "Oscillate", "Pride 2015", @@ -140,7 +181,7 @@ "Palette", "Fire 2012", "Colorwaves", - "BPM", + "Bpm", "Fill Noise", "Noise 1", "Noise 2", @@ -149,20 +190,124 @@ "Colortwinkles", "Lake", "Meteor", - "Smooth Meteor", + "Meteor Smooth", "Railway", "Ripple", "Twinklefox", "Twinklecat", - "Halloween Eyes" + "Halloween Eyes", + "Solid Pattern", + "Solid Pattern Tri", + "Spots", + "Spots Fade", + "Glitter", + "Candle", + "Fireworks Starburst", + "Fireworks 1D", + "Bouncing Balls", + "Sinelon", + "Sinelon Dual", + "Sinelon Rainbow", + "Popcorn", + "Drip", + "Plasma", + "Percent", + "Ripple Rainbow", + "Heartbeat", + "Pacifica", + "Candle Multi", + "Solid Glitter", + "Sunrise", + "Phased", + "Twinkleup", + "Noise Pal", + "Sine", + "Phased Noise", + "Flow", + "Chunchun", + "Dancing Shadows", + "Washing Machine", + "RSVD", + "Blends", + "TV Simulator", + "Dynamic Smooth", + "Spaceships", + "Crazy Bees", + "Ghost Rider", + "Blobs", + "Scrolling Text", + "Drift Rose", + "Distortion Waves", + "Soap", + "Octopus", + "Waving Cell", + "Pixels", + "Pixelwave", + "Juggles", + "Matripix", + "Gravimeter", + "Plasmoid", + "Puddles", + "Midnoise", + "Noisemeter", + "Freqwave", + "Freqmatrix", + "GEQ", + "Waterfall", + "Freqpixels", + "RSVD", + "Noisefire", + "Puddlepeak", + "Noisemove", + "Noise2D", + "Perlin Move", + "Ripple Peak", + "Firenoise", + "Squared Swirl", + "RSVD", + "DNA", + "Matrix", + "Metaballs", + "Freqmap", + "Gravcenter", + "Gravcentric", + "Gravfreq", + "DJ Light", + "Funky Plank", + "RSVD", + "Pulser", + "Blurz", + "Drift", + "Waverly", + "Sun Radiation", + "Colored Bursts", + "Julia", + "RSVD", + "RSVD", + "RSVD", + "Game Of Life", + "Tartan", + "Polar Lights", + "Swirl", + "Lissajous", + "Frizzles", + "Plasma Ball", + "Flow Stripe", + "Hiphotic", + "Sindots", + "DNA Spiral", + "Black Hole", + "Wavesins", + "Rocktaves", + "Akemi" ], "palettes": [ "Default", - "Random Cycle", - "Primary Color", - "Based on Primary", - "Set Colors", - "Based on Set", + "* Random Cycle", + "* Color 1", + "* Colors 1&2", + "* Color Gradient", + "* Colors Only", "Party", "Cloud", "Lava", @@ -179,11 +324,11 @@ "Splash", "Pastel", "Sunset 2", - "Beech", + "Beach", "Vintage", "Departure", "Landscape", - "Beach", + "Beech", "Sherbet", "Hult", "Hult 64", @@ -206,36 +351,82 @@ "April Night", "Orangery", 
"C9", - "Sakura" + "Sakura", + "Aurora", + "Atlantica", + "C9 2", + "C9 New", + "Temperature", + "Aurora 2", + "Retro Clown", + "Candy", + "Toxy Reaf", + "Fairy Reaf", + "Semi Blue", + "Pink Candy", + "Red Reaf", + "Aqua Flash", + "Yelblu Hot", + "Lite Light", + "Red Flash", + "Blink Red", + "Red Shift", + "Red Tide", + "Candy2" ], "presets": { "0": {}, "1": { - "on": false, - "bri": 255, + "on": true, + "bri": 128, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, + "stop": 131, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, + "n": "", "col": [ - [97, 144, 255], + [40, 255, 3], [0, 0, 0], [0, 0, 0] ], - "fx": 9, - "sx": 183, - "ix": 255, - "pal": 1, + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 }, { "stop": 0 @@ -274,31 +465,56 @@ "n": "Preset 1" }, "2": { - "on": false, - "bri": 255, + "on": true, + "bri": 128, "transition": 7, "mainseg": 0, "seg": [ { "id": 0, "start": 0, - "stop": 13, + "stop": 131, "grp": 1, "spc": 0, + "of": 0, "on": true, + "frz": false, "bri": 255, + "cct": 127, + "set": 0, + "n": "", "col": [ - [97, 144, 255], + [51, 88, 255], [0, 0, 0], [0, 0, 0] ], - "fx": 9, - "sx": 183, - "ix": 255, - "pal": 1, + "fx": 0, + "sx": 128, + "ix": 128, + "pal": 0, + "c1": 128, + "c2": 128, + "c3": 16, "sel": true, "rev": false, - "mi": false + "mi": false, + "o1": false, + "o2": false, + "o3": false, + "si": 0, + "m12": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 + }, + { + "stop": 0 }, { "stop": 0 @@ -339,23 +555,25 @@ "3": { "playlist": { "ps": [1, 2], - "dur": [30, 30], + "dur": [300, 300], "transition": [7, 7], "repeat": 0, - "r": false, - "end": 0 + "end": 0, + "r": 0 }, + "on": true, "n": "Playlist 1" }, "4": { "playlist": { - "ps": [1, 2], - "dur": [30, 30], + "ps": [2, 0], + "dur": [300, 300], "transition": [7, 7], "repeat": 0, - "r": false, - "end": 0 + "end": 0, + "r": 0 }, + "on": true, "n": "Playlist 2" } } diff --git a/tests/components/wled/snapshots/test_button.ambr b/tests/components/wled/snapshots/test_button.ambr index 9c91c0e0050..4e6260bc9bd 100644 --- a/tests/components/wled/snapshots/test_button.ambr +++ b/tests/components/wled/snapshots/test_button.ambr @@ -59,7 +59,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -71,13 +71,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_diagnostics.ambr b/tests/components/wled/snapshots/test_diagnostics.ambr index 643e5fe4ad0..90732c02c36 100644 --- a/tests/components/wled/snapshots/test_diagnostics.ambr +++ b/tests/components/wled/snapshots/test_diagnostics.ambr @@ -5,22 +5,109 @@ '0': 'Solid', '1': 'Blink', '10': 'Scan', - '11': 'Dual Scan', + '100': 'Heartbeat', + '101': 'Pacifica', + '102': 'Candle Multi', + '103': 'Solid Glitter', + '104': 'Sunrise', + '105': 'Phased', + '106': 'Twinkleup', + '107': 'Noise Pal', + '108': 'Sine', + '109': 'Phased Noise', + '11': 'Scan Dual', + 
'110': 'Flow', + '111': 'Chunchun', + '112': 'Dancing Shadows', + '113': 'Washing Machine', + '114': 'RSVD', + '115': 'Blends', + '116': 'TV Simulator', + '117': 'Dynamic Smooth', + '118': 'Spaceships', + '119': 'Crazy Bees', '12': 'Fade', - '13': 'Chase', - '14': 'Chase Rainbow', + '120': 'Ghost Rider', + '121': 'Blobs', + '122': 'Scrolling Text', + '123': 'Drift Rose', + '124': 'Distortion Waves', + '125': 'Soap', + '126': 'Octopus', + '127': 'Waving Cell', + '128': 'Pixels', + '129': 'Pixelwave', + '13': 'Theater', + '130': 'Juggles', + '131': 'Matripix', + '132': 'Gravimeter', + '133': 'Plasmoid', + '134': 'Puddles', + '135': 'Midnoise', + '136': 'Noisemeter', + '137': 'Freqwave', + '138': 'Freqmatrix', + '139': 'GEQ', + '14': 'Theater Rainbow', + '140': 'Waterfall', + '141': 'Freqpixels', + '142': 'RSVD', + '143': 'Noisefire', + '144': 'Puddlepeak', + '145': 'Noisemove', + '146': 'Noise2D', + '147': 'Perlin Move', + '148': 'Ripple Peak', + '149': 'Firenoise', '15': 'Running', + '150': 'Squared Swirl', + '151': 'RSVD', + '152': 'DNA', + '153': 'Matrix', + '154': 'Metaballs', + '155': 'Freqmap', + '156': 'Gravcenter', + '157': 'Gravcentric', + '158': 'Gravfreq', + '159': 'DJ Light', '16': 'Saw', + '160': 'Funky Plank', + '161': 'RSVD', + '162': 'Pulser', + '163': 'Blurz', + '164': 'Drift', + '165': 'Waverly', + '166': 'Sun Radiation', + '167': 'Colored Bursts', + '168': 'Julia', + '169': 'RSVD', '17': 'Twinkle', + '170': 'RSVD', + '171': 'RSVD', + '172': 'Game Of Life', + '173': 'Tartan', + '174': 'Polar Lights', + '175': 'Swirl', + '176': 'Lissajous', + '177': 'Frizzles', + '178': 'Plasma Ball', + '179': 'Flow Stripe', '18': 'Dissolve', + '180': 'Hiphotic', + '181': 'Sindots', + '182': 'DNA Spiral', + '183': 'Black Hole', + '184': 'Wavesins', + '185': 'Rocktaves', + '186': 'Akemi', '19': 'Dissolve Rnd', '2': 'Breathe', '20': 'Sparkle', - '21': 'Dark Sparkle', + '21': 'Sparkle Dark', '22': 'Sparkle+', '23': 'Strobe', '24': 'Strobe Rainbow', - '25': 'Mega Strobe', + '25': 'Strobe Mega', '26': 'Blink Rainbow', '27': 'Android', '28': 'Chase', @@ -33,33 +120,33 @@ '34': 'Colorful', '35': 'Traffic Light', '36': 'Sweep Random', - '37': 'Running 2', - '38': 'Red & Blue', + '37': 'Chase 2', + '38': 'Aurora', '39': 'Stream', '4': 'Wipe Random', '40': 'Scanner', '41': 'Lighthouse', '42': 'Fireworks', '43': 'Rain', - '44': 'Merry Christmas', + '44': 'Tetrix', '45': 'Fire Flicker', '46': 'Gradient', '47': 'Loading', - '48': 'In Out', - '49': 'In In', + '48': 'Rolling Balls', + '49': 'Fairy', '5': 'Random Colors', - '50': 'Out Out', - '51': 'Out In', - '52': 'Circus', - '53': 'Halloween', - '54': 'Tri Chase', + '50': 'Two Dots', + '51': 'Fairytwinkle', + '52': 'Running Dual', + '53': 'RSVD', + '54': 'Chase 3', '55': 'Tri Wipe', '56': 'Tri Fade', '57': 'Lightning', '58': 'ICU', '59': 'Multi Comet', '6': 'Sweep', - '60': 'Dual Scanner', + '60': 'Scanner Dual', '61': 'Stream 2', '62': 'Oscillate', '63': 'Pride 2015', @@ -67,55 +154,82 @@ '65': 'Palette', '66': 'Fire 2012', '67': 'Colorwaves', - '68': 'BPM', + '68': 'Bpm', '69': 'Fill Noise', '7': 'Dynamic', '70': 'Noise 1', '71': 'Noise 2', '72': 'Noise 3', '73': 'Noise 4', - '74': 'Colortwinkle', + '74': 'Colortwinkles', '75': 'Lake', '76': 'Meteor', - '77': 'Smooth Meteor', + '77': 'Meteor Smooth', '78': 'Railway', '79': 'Ripple', '8': 'Colorloop', '80': 'Twinklefox', + '81': 'Twinklecat', + '82': 'Halloween Eyes', + '83': 'Solid Pattern', + '84': 'Solid Pattern Tri', + '85': 'Spots', + '86': 'Spots Fade', + '87': 'Glitter', + '88': 'Candle', + '89': 
'Fireworks Starburst', '9': 'Rainbow', + '90': 'Fireworks 1D', + '91': 'Bouncing Balls', + '92': 'Sinelon', + '93': 'Sinelon Dual', + '94': 'Sinelon Rainbow', + '95': 'Popcorn', + '96': 'Drip', + '97': 'Plasma', + '98': 'Percent', + '99': 'Ripple Rainbow', }), 'info': dict({ - 'architecture': 'esp8266', - 'arduino_core_version': '2.4.2', + 'arch': 'esp32', 'brand': 'WLED', - 'build_type': 'bin', - 'effect_count': 81, - 'filesystem': None, - 'free_heap': 14600, + 'core': 'v3.3.6-16-gcc5440f6a2', + 'freeheap': 198384, + 'fs': dict({ + 'pmt': 1718827787.0, + 't': 983, + 'u': 12, + }), + 'fxcount': 187, 'ip': '127.0.0.1', 'leds': dict({ - '__type': "", - 'repr': 'Leds(cct=False, count=30, fps=None, light_capabilities=None, max_power=850, max_segments=10, power=470, rgbw=False, wv=True, segment_light_capabilities=None)', + 'count': 30, + 'fps': 5, + 'light_capabilities': 1, + 'max_power': 850, + 'max_segments': 32, + 'power': 515, + 'segment_light_capabilities': list([ + 1, + 1, + ]), }), + 'lip': '', 'live': False, - 'live_ip': 'Unknown', - 'live_mode': 'Unknown', - 'mac_address': 'aabbccddeeff', + 'lm': '', + 'mac': 'aabbccddeeff', 'name': 'WLED RGB Light', - 'pallet_count': 50, - 'product': 'DIY light', - 'udp_port': 21324, - 'uptime': 32, - 'version': '0.8.5', - 'version_id': 1909122, - 'version_latest_beta': '0.13.0b1', - 'version_latest_stable': '0.12.0', - 'websocket': None, + 'palcount': 71, + 'product': 'FOSS', + 'udpport': 21324, + 'uptime': 966, + 'ver': '0.14.4', + 'vid': 2405180, 'wifi': '**REDACTED**', }), 'palettes': dict({ '0': 'Default', - '1': 'Random Cycle', + '1': '* Random Cycle', '10': 'Forest', '11': 'Rainbow', '12': 'Rainbow Bands', @@ -126,18 +240,18 @@ '17': 'Yellowout', '18': 'Analogous', '19': 'Splash', - '2': 'Primary Color', + '2': '* Color 1', '20': 'Pastel', '21': 'Sunset 2', - '22': 'Beech', + '22': 'Beach', '23': 'Vintage', '24': 'Departure', '25': 'Landscape', - '26': 'Beach', + '26': 'Beech', '27': 'Sherbet', '28': 'Hult', '29': 'Hult 64', - '3': 'Based on Primary', + '3': '* Colors 1&2', '30': 'Drywet', '31': 'Jul', '32': 'Grintage', @@ -148,7 +262,7 @@ '37': 'Cyane', '38': 'Light Pink', '39': 'Autumn', - '4': 'Set Colors', + '4': '* Color Gradient', '40': 'Magenta', '41': 'Magred', '42': 'Yelmag', @@ -159,9 +273,30 @@ '47': 'Orangery', '48': 'C9', '49': 'Sakura', - '5': 'Based on Set', + '5': '* Colors Only', + '50': 'Aurora', + '51': 'Atlantica', + '52': 'C9 2', + '53': 'C9 New', + '54': 'Temperature', + '55': 'Aurora 2', + '56': 'Retro Clown', + '57': 'Candy', + '58': 'Toxy Reaf', + '59': 'Fairy Reaf', '6': 'Party', + '60': 'Semi Blue', + '61': 'Pink Candy', + '62': 'Red Reaf', + '63': 'Aqua Flash', + '64': 'Yelblu Hot', + '65': 'Lite Light', + '66': 'Red Flash', + '67': 'Blink Red', + '68': 'Red Shift', + '69': 'Red Tide', '7': 'Cloud', + '70': 'Candy2', '8': 'Lava', '9': 'Ocean', }), @@ -170,30 +305,90 @@ 'presets': dict({ }), 'state': dict({ - 'brightness': 127, + 'bri': 128, 'lor': 0, - 'nightlight': dict({ - '__type': "", - 'repr': 'Nightlight(duration=60, fade=True, on=False, mode=, target_brightness=0)', + 'nl': dict({ + 'dur': 60, + 'mode': 1, + 'on': False, + 'tbri': 0, }), 'on': True, - 'playlist': -1, - 'preset': -1, - 'segments': list([ - dict({ - '__type': "", - 'repr': "Segment(brightness=127, clones=-1, color_primary=(255, 159, 0), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=0, name='Solid'), intensity=128, length=20, on=True, palette=Palette(name='Default', palette_id=0), reverse=False, segment_id=0, 
selected=True, speed=32, start=0, stop=19)", + 'seg': dict({ + '0': dict({ + 'bri': 255, + 'cct': 127, + 'cln': -1, + 'col': list([ + list([ + 127, + 172, + 255, + ]), + list([ + 0, + 0, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + ]), + 'fx': 0, + 'id': 0, + 'ix': 128, + 'len': 15, + 'on': True, + 'pal': 0, + 'rev': False, + 'sel': False, + 'start': 0, + 'stop': 15, + 'sx': 32, }), - dict({ - '__type': "", - 'repr': "Segment(brightness=127, clones=-1, color_primary=(0, 255, 123), color_secondary=(0, 0, 0), color_tertiary=(0, 0, 0), effect=Effect(effect_id=1, name='Blink'), intensity=64, length=10, on=True, palette=Palette(name='Random Cycle', palette_id=1), reverse=True, segment_id=1, selected=True, speed=16, start=20, stop=30)", + '1': dict({ + 'bri': 255, + 'cct': 127, + 'cln': -1, + 'col': list([ + list([ + 255, + 170, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + list([ + 0, + 0, + 0, + ]), + ]), + 'fx': 3, + 'id': 1, + 'ix': 64, + 'len': 15, + 'on': True, + 'pal': 1, + 'rev': True, + 'sel': True, + 'start': 15, + 'stop': 30, + 'sx': 16, }), - ]), - 'sync': dict({ - '__type': "", - 'repr': 'Sync(receive=True, send=False)', }), 'transition': 7, + 'udpn': dict({ + 'recv': True, + 'rgrp': 1, + 'send': False, + 'sgrp': 1, + }), }), }) # --- diff --git a/tests/components/wled/snapshots/test_number.ambr b/tests/components/wled/snapshots/test_number.ambr index bee3e180090..0fb6cff3d51 100644 --- a/tests/components/wled/snapshots/test_number.ambr +++ b/tests/components/wled/snapshots/test_number.ambr @@ -67,7 +67,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -79,13 +79,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -157,7 +158,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -169,182 +170,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', - 'via_device_id': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'WLED RGB Light Segment 1 Intensity', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.wled_rgb_light_segment_1_intensity', - 'last_changed': , - 'last_updated': , - 'state': '64', - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].1 - EntityRegistryEntrySnapshot({ - '_display_repr': , - '_partial_repr': , - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.wled_rgb_light_segment_1_intensity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Segment 1 Intensity', - 
'platform': 'wled', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aabbccddeeff_intensity_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_intensity-42-intensity].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 'esp8266', - 'id': , - 'identifiers': set({ - tuple( - 'wled', - 'aabbccddeeff', - ), - }), - 'is_new': False, - 'manufacturer': 'WLED', - 'model': 'DIY light', - 'name': 'WLED RGB Light', - 'name_by_user': None, - 'suggested_area': None, - 'sw_version': '0.8.5', - 'via_device_id': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'WLED RGB Light Segment 1 Speed', - 'icon': 'mdi:speedometer', - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'context': , - 'entity_id': 'number.wled_rgb_light_segment_1_speed', - 'last_changed': , - 'last_updated': , - 'state': '16', - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].1 - EntityRegistryEntrySnapshot({ - '_display_repr': , - '_partial_repr': , - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max': 255, - 'min': 0, - 'mode': , - 'step': 1, - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'number', - 'entity_category': , - 'entity_id': 'number.wled_rgb_light_segment_1_speed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 'mdi:speedometer', - 'original_name': 'Segment 1 Speed', - 'platform': 'wled', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aabbccddeeff_speed_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_speed_state[number.wled_rgb_light_segment_1_speed-42-speed].2 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'http://127.0.0.1', - 'connections': set({ - tuple( - 'mac', - 'aa:bb:cc:dd:ee:ff', - ), - }), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': 'esp8266', - 'id': , - 'identifiers': set({ - tuple( - 'wled', - 'aabbccddeeff', - ), - }), - 'is_new': False, - 'manufacturer': 'WLED', - 'model': 'DIY light', - 'name': 'WLED RGB Light', - 'name_by_user': None, - 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_select.ambr b/tests/components/wled/snapshots/test_select.ambr index f6447f699c9..2998583f8b3 100644 --- a/tests/components/wled/snapshots/test_select.ambr +++ b/tests/components/wled/snapshots/test_select.ambr @@ -69,7 +69,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -81,13 +81,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -96,56 +97,77 @@ 'attributes': ReadOnlyDict({ 'friendly_name': 'WLED RGB Light Segment 1 color palette', 
'options': list([ - 'Analogous', - 'April Night', - 'Autumn', - 'Based on Primary', - 'Based on Set', - 'Beach', - 'Beech', - 'Breeze', - 'C9', - 'Cloud', - 'Cyane', 'Default', - 'Departure', - 'Drywet', - 'Fire', - 'Forest', - 'Grintage', - 'Hult', - 'Hult 64', - 'Icefire', - 'Jul', - 'Landscape', - 'Lava', - 'Light Pink', - 'Magenta', - 'Magred', - 'Ocean', - 'Orange & Teal', - 'Orangery', + '* Random Cycle', + '* Color 1', + '* Colors 1&2', + '* Color Gradient', + '* Colors Only', 'Party', - 'Pastel', - 'Primary Color', + 'Cloud', + 'Lava', + 'Ocean', + 'Forest', 'Rainbow', 'Rainbow Bands', - 'Random Cycle', - 'Red & Blue', - 'Rewhi', - 'Rivendell', - 'Sakura', - 'Set Colors', - 'Sherbet', - 'Splash', 'Sunset', - 'Sunset 2', - 'Tertiary', - 'Tiamat', - 'Vintage', - 'Yelblu', + 'Rivendell', + 'Breeze', + 'Red & Blue', 'Yellowout', + 'Analogous', + 'Splash', + 'Pastel', + 'Sunset 2', + 'Beach', + 'Vintage', + 'Departure', + 'Landscape', + 'Beech', + 'Sherbet', + 'Hult', + 'Hult 64', + 'Drywet', + 'Jul', + 'Grintage', + 'Rewhi', + 'Tertiary', + 'Fire', + 'Icefire', + 'Cyane', + 'Light Pink', + 'Autumn', + 'Magenta', + 'Magred', 'Yelmag', + 'Yelblu', + 'Orange & Teal', + 'Tiamat', + 'April Night', + 'Orangery', + 'C9', + 'Sakura', + 'Aurora', + 'Atlantica', + 'C9 2', + 'C9 New', + 'Temperature', + 'Aurora 2', + 'Retro Clown', + 'Candy', + 'Toxy Reaf', + 'Fairy Reaf', + 'Semi Blue', + 'Pink Candy', + 'Red Reaf', + 'Aqua Flash', + 'Yelblu Hot', + 'Lite Light', + 'Red Flash', + 'Blink Red', + 'Red Shift', + 'Red Tide', + 'Candy2', ]), }), 'context': , @@ -153,7 +175,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Random Cycle', + 'state': '* Random Cycle', }) # --- # name: test_color_palette_state[rgb-select.wled_rgb_light_segment_1_color_palette-Icefire-segment-called_with0].1 @@ -163,56 +185,77 @@ 'area_id': None, 'capabilities': dict({ 'options': list([ - 'Analogous', - 'April Night', - 'Autumn', - 'Based on Primary', - 'Based on Set', - 'Beach', - 'Beech', - 'Breeze', - 'C9', - 'Cloud', - 'Cyane', 'Default', - 'Departure', - 'Drywet', - 'Fire', - 'Forest', - 'Grintage', - 'Hult', - 'Hult 64', - 'Icefire', - 'Jul', - 'Landscape', - 'Lava', - 'Light Pink', - 'Magenta', - 'Magred', - 'Ocean', - 'Orange & Teal', - 'Orangery', + '* Random Cycle', + '* Color 1', + '* Colors 1&2', + '* Color Gradient', + '* Colors Only', 'Party', - 'Pastel', - 'Primary Color', + 'Cloud', + 'Lava', + 'Ocean', + 'Forest', 'Rainbow', 'Rainbow Bands', - 'Random Cycle', - 'Red & Blue', - 'Rewhi', - 'Rivendell', - 'Sakura', - 'Set Colors', - 'Sherbet', - 'Splash', 'Sunset', - 'Sunset 2', - 'Tertiary', - 'Tiamat', - 'Vintage', - 'Yelblu', + 'Rivendell', + 'Breeze', + 'Red & Blue', 'Yellowout', + 'Analogous', + 'Splash', + 'Pastel', + 'Sunset 2', + 'Beach', + 'Vintage', + 'Departure', + 'Landscape', + 'Beech', + 'Sherbet', + 'Hult', + 'Hult 64', + 'Drywet', + 'Jul', + 'Grintage', + 'Rewhi', + 'Tertiary', + 'Fire', + 'Icefire', + 'Cyane', + 'Light Pink', + 'Autumn', + 'Magenta', + 'Magred', 'Yelmag', + 'Yelblu', + 'Orange & Teal', + 'Tiamat', + 'April Night', + 'Orangery', + 'C9', + 'Sakura', + 'Aurora', + 'Atlantica', + 'C9 2', + 'C9 New', + 'Temperature', + 'Aurora 2', + 'Retro Clown', + 'Candy', + 'Toxy Reaf', + 'Fairy Reaf', + 'Semi Blue', + 'Pink Candy', + 'Red Reaf', + 'Aqua Flash', + 'Yelblu Hot', + 'Lite Light', + 'Red Flash', + 'Blink Red', + 'Red Shift', + 'Red Tide', + 'Candy2', ]), }), 'config_entry_id': , @@ -255,7 +298,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 
'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -267,13 +310,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -291,7 +335,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Playlist 1', + 'state': 'unknown', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_playlist-Playlist 2-playlist-called_with2].1 @@ -328,7 +372,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'playlist', - 'unique_id': 'aabbccddee11_playlist', + 'unique_id': 'aabbccddeeff_playlist', 'unit_of_measurement': None, }) # --- @@ -340,30 +384,31 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:11', + 'aa:bb:cc:dd:ee:ff', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddee11', + 'aabbccddeeff', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGBW Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.6b4', + 'sw_version': '0.99.0b1', 'via_device_id': None, }) # --- @@ -381,7 +426,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'Preset 1', + 'state': 'unknown', }) # --- # name: test_color_palette_state[rgbw-select.wled_rgbw_light_preset-Preset 2-preset-called_with3].1 @@ -418,7 +463,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'preset', - 'unique_id': 'aabbccddee11_preset', + 'unique_id': 'aabbccddeeff_preset', 'unit_of_measurement': None, }) # --- @@ -430,30 +475,31 @@ 'connections': set({ tuple( 'mac', - 'aa:bb:cc:dd:ee:11', + 'aa:bb:cc:dd:ee:ff', ), }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( 'wled', - 'aabbccddee11', + 'aabbccddeeff', ), }), 'is_new': False, 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGBW Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.6b4', + 'sw_version': '0.99.0b1', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/snapshots/test_switch.ambr b/tests/components/wled/snapshots/test_switch.ambr index 6bca0a2ed3b..ee3a72ba872 100644 --- a/tests/components/wled/snapshots/test_switch.ambr +++ b/tests/components/wled/snapshots/test_switch.ambr @@ -3,7 +3,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'duration': 60, - 'fade': True, 'friendly_name': 'WLED RGB Light Nightlight', 'target_brightness': 0, }), @@ -61,7 +60,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -73,13 +72,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -142,7 +142,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 
'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -154,13 +154,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -224,7 +225,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -236,13 +237,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- @@ -306,7 +308,7 @@ }), 'disabled_by': None, 'entry_type': None, - 'hw_version': 'esp8266', + 'hw_version': 'esp32', 'id': , 'identifiers': set({ tuple( @@ -318,13 +320,14 @@ 'labels': set({ }), 'manufacturer': 'WLED', - 'model': 'DIY light', + 'model': 'FOSS', + 'model_id': None, 'name': 'WLED RGB Light', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': '0.8.5', + 'sw_version': '0.14.4', 'via_device_id': None, }) # --- diff --git a/tests/components/wled/test_config_flow.py b/tests/components/wled/test_config_flow.py index a1529eda1c7..a1cf515a24b 100644 --- a/tests/components/wled/test_config_flow.py +++ b/tests/components/wled/test_config_flow.py @@ -33,9 +33,7 @@ async def test_full_user_flow_implementation(hass: HomeAssistant) -> None: assert result.get("title") == "WLED RGB Light" assert result.get("type") is FlowResultType.CREATE_ENTRY - assert "data" in result assert result["data"][CONF_HOST] == "192.168.1.123" - assert "result" in result assert result["result"].unique_id == "aabbccddeeff" @@ -167,23 +165,6 @@ async def test_user_device_exists_abort( assert result.get("reason") == "already_configured" -async def test_user_with_cct_channel_abort( - hass: HomeAssistant, - mock_wled: MagicMock, -) -> None: - """Test we abort user flow if WLED device uses a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_USER}, - data={CONF_HOST: "192.168.1.123"}, - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "cct_unsupported" - - @pytest.mark.usefixtures("mock_wled") async def test_zeroconf_without_mac_device_exists_abort( hass: HomeAssistant, @@ -234,31 +215,6 @@ async def test_zeroconf_with_mac_device_exists_abort( assert result.get("reason") == "already_configured" -async def test_zeroconf_with_cct_channel_abort( - hass: HomeAssistant, - mock_wled: MagicMock, -) -> None: - """Test we abort zeroconf flow if WLED device uses a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_ZEROCONF}, - data=zeroconf.ZeroconfServiceInfo( - ip_address=ip_address("192.168.1.123"), - ip_addresses=[ip_address("192.168.1.123")], - hostname="example.local.", - name="mock_name", - port=None, - properties={CONF_MAC: "aabbccddeeff"}, - type="mock_type", - ), - ) - - assert result.get("type") is FlowResultType.ABORT - assert result.get("reason") == "cct_unsupported" - - async def test_options_flow( hass: HomeAssistant, 
mock_config_entry: MockConfigEntry ) -> None: diff --git a/tests/components/wled/test_init.py b/tests/components/wled/test_init.py index f6f1da0d41e..9dfcabd55e3 100644 --- a/tests/components/wled/test_init.py +++ b/tests/components/wled/test_init.py @@ -7,7 +7,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from wled import WLEDConnectionError -from homeassistant.components.wled.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -44,7 +43,6 @@ async def test_load_unload_config_entry( # Ensure everything is cleaned up nicely and are disconnected assert mock_wled.disconnect.call_count == 1 - assert not hass.data.get(DOMAIN) @patch( @@ -69,21 +67,3 @@ async def test_setting_unique_id( """Test we set unique ID if not set yet.""" assert init_integration.runtime_data assert init_integration.unique_id == "aabbccddeeff" - - -async def test_error_config_entry_with_cct_channel( - hass: HomeAssistant, - mock_config_entry: MockConfigEntry, - mock_wled: AsyncMock, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the WLED fails entry setup with a CCT channel.""" - mock_wled.update.return_value.info.leds.cct = True - - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - # Ensure config entry is errored and are connected and disconnected - assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR - assert "has a CCT channel, which is not supported" in caplog.text diff --git a/tests/components/wled/test_light.py b/tests/components/wled/test_light.py index 2b64619e306..58c4aa4e8c6 100644 --- a/tests/components/wled/test_light.py +++ b/tests/components/wled/test_light.py @@ -1,6 +1,5 @@ """Tests for the WLED light platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,14 +8,24 @@ from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, + ATTR_COLOR_TEMP_KELVIN, ATTR_EFFECT, ATTR_HS_COLOR, + ATTR_MAX_COLOR_TEMP_KELVIN, + ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_RGB_COLOR, ATTR_RGBW_COLOR, + ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as LIGHT_DOMAIN, + ColorMode, +) +from homeassistant.components.wled.const import ( + CONF_KEEP_MAIN_LIGHT, + DOMAIN, + SCAN_INTERVAL, ) -from homeassistant.components.wled.const import CONF_KEEP_MAIN_LIGHT, SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_ICON, @@ -30,7 +39,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from tests.common import MockConfigEntry, async_fire_time_changed, load_fixture +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, +) pytestmark = pytest.mark.usefixtures("init_integration") @@ -41,9 +54,9 @@ async def test_rgb_light_state( """Test the creation and values of the WLED lights.""" # First segment of the strip assert (state := hass.states.get("light.wled_rgb_light")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 + assert state.attributes.get(ATTR_BRIGHTNESS) == 255 assert state.attributes.get(ATTR_EFFECT) == "Solid" - assert state.attributes.get(ATTR_HS_COLOR) == (37.412, 100.0) + assert state.attributes.get(ATTR_HS_COLOR) == (218.906, 50.196) assert state.attributes.get(ATTR_ICON) is None assert 
state.state == STATE_ON @@ -52,9 +65,9 @@ async def test_rgb_light_state( # Second segment of the strip assert (state := hass.states.get("light.wled_rgb_light_segment_1")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 - assert state.attributes.get(ATTR_EFFECT) == "Blink" - assert state.attributes.get(ATTR_HS_COLOR) == (148.941, 100.0) + assert state.attributes.get(ATTR_BRIGHTNESS) == 255 + assert state.attributes.get(ATTR_EFFECT) == "Wipe" + assert state.attributes.get(ATTR_HS_COLOR) == (40.0, 100.0) assert state.attributes.get(ATTR_ICON) is None assert state.state == STATE_ON @@ -63,7 +76,7 @@ async def test_rgb_light_state( # Test main control of the lightstrip assert (state := hass.states.get("light.wled_rgb_light_main")) - assert state.attributes.get(ATTR_BRIGHTNESS) == 127 + assert state.attributes.get(ATTR_BRIGHTNESS) == 128 assert state.state == STATE_ON assert (entry := entity_registry.async_get("light.wled_rgb_light_main")) @@ -188,8 +201,8 @@ async def test_dynamically_handle_segments( assert not hass.states.get("light.wled_rgb_light_segment_1") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) @@ -327,6 +340,8 @@ async def test_rgbw_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: """Test RGBW support for WLED.""" assert (state := hass.states.get("light.wled_rgbw_light")) assert state.state == STATE_ON + assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ColorMode.RGBW] + assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.RGBW assert state.attributes.get(ATTR_RGBW_COLOR) == (255, 0, 0, 139) await hass.services.async_call( @@ -362,3 +377,34 @@ async def test_single_segment_with_keep_main_light( assert (state := hass.states.get("light.wled_rgb_light_main")) assert state.state == STATE_ON + + +@pytest.mark.parametrize("device_fixture", ["cct"]) +async def test_cct_light(hass: HomeAssistant, mock_wled: MagicMock) -> None: + """Test CCT support for WLED.""" + assert (state := hass.states.get("light.wled_cct_light")) + assert state.state == STATE_ON + assert state.attributes.get(ATTR_SUPPORTED_COLOR_MODES) == [ + ColorMode.COLOR_TEMP, + ColorMode.RGBW, + ] + assert state.attributes.get(ATTR_COLOR_MODE) == ColorMode.COLOR_TEMP + assert state.attributes.get(ATTR_MIN_COLOR_TEMP_KELVIN) == 2000 + assert state.attributes.get(ATTR_MAX_COLOR_TEMP_KELVIN) == 6535 + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) == 2942 + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.wled_cct_light", + ATTR_COLOR_TEMP_KELVIN: 4321, + }, + blocking=True, + ) + assert mock_wled.segment.call_count == 1 + mock_wled.segment.assert_called_with( + cct=130, + on=True, + segment_id=0, + ) diff --git a/tests/components/wled/test_number.py b/tests/components/wled/test_number.py index b692de37282..344eb03bc06 100644 --- a/tests/components/wled/test_number.py +++ b/tests/components/wled/test_number.py @@ -1,6 +1,5 @@ """Tests for the WLED number platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -13,13 +12,13 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) -from homeassistant.components.wled.const import SCAN_INTERVAL +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL from homeassistant.const 
import ATTR_ENTITY_ID, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -128,8 +127,8 @@ async def test_speed_dynamically_handle_segments( # Test adding a segment dynamically... return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_select.py b/tests/components/wled/test_select.py index 380af1a286a..364e5fc2034 100644 --- a/tests/components/wled/test_select.py +++ b/tests/components/wled/test_select.py @@ -1,6 +1,5 @@ """Tests for the WLED select platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,18 +8,13 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.select import ATTR_OPTION, DOMAIN as SELECT_DOMAIN -from homeassistant.components.wled.const import SCAN_INTERVAL -from homeassistant.const import ( - ATTR_ENTITY_ID, - SERVICE_SELECT_OPTION, - STATE_UNAVAILABLE, - STATE_UNKNOWN, -) +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_SELECT_OPTION, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -135,8 +129,8 @@ async def test_color_palette_dynamically_handle_segments( assert not hass.states.get("select.wled_rgb_light_segment_1_color_palette") return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) @@ -148,7 +142,7 @@ async def test_color_palette_dynamically_handle_segments( assert ( segment1 := hass.states.get("select.wled_rgb_light_segment_1_color_palette") ) - assert segment1.state == "Random Cycle" + assert segment1.state == "* Random Cycle" # Test adding if segment shows up again, including the master entity mock_wled.update.return_value = return_value @@ -174,39 +168,3 @@ async def test_playlist_unavailable_without_playlists(hass: HomeAssistant) -> No """Test WLED playlist entity is unavailable when playlists are not available.""" assert (state := hass.states.get("select.wled_rgb_light_playlist")) assert state.state == STATE_UNAVAILABLE - - -@pytest.mark.parametrize("device_fixture", ["rgbw"]) -async def test_old_style_preset_active( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_wled: MagicMock, -) -> None: - """Test unknown preset returned (when old style/unknown) preset is active.""" - # Set device preset state to a random number - mock_wled.update.return_value.state.preset = 99 - - 
freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert (state := hass.states.get("select.wled_rgbw_light_preset")) - assert state.state == STATE_UNKNOWN - - -@pytest.mark.parametrize("device_fixture", ["rgbw"]) -async def test_old_style_playlist_active( - hass: HomeAssistant, - freezer: FrozenDateTimeFactory, - mock_wled: MagicMock, -) -> None: - """Test when old style playlist cycle is active.""" - # Set device playlist to 0, which meant "cycle" previously. - mock_wled.update.return_value.state.playlist = 0 - - freezer.tick(SCAN_INTERVAL) - async_fire_time_changed(hass) - await hass.async_block_till_done() - - assert (state := hass.states.get("select.wled_rgbw_light_playlist")) - assert state.state == STATE_UNKNOWN diff --git a/tests/components/wled/test_sensor.py b/tests/components/wled/test_sensor.py index 319622e7cb3..8bd5431cf59 100644 --- a/tests/components/wled/test_sensor.py +++ b/tests/components/wled/test_sensor.py @@ -44,7 +44,7 @@ async def test_sensors( == UnitOfElectricCurrent.MILLIAMPERE ) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.CURRENT - assert state.state == "470" + assert state.state == "515" assert ( entry := entity_registry.async_get("sensor.wled_rgb_light_estimated_current") @@ -55,7 +55,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_uptime")) assert state.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.TIMESTAMP assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) is None - assert state.state == "2019-11-11T09:10:00+00:00" + assert state.state == "2019-11-11T08:54:26+00:00" assert (entry := entity_registry.async_get("sensor.wled_rgb_light_uptime")) assert entry.unique_id == "aabbccddeeff_uptime" @@ -64,7 +64,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_free_memory")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfInformation.BYTES - assert state.state == "14600" + assert state.state == "198384" assert entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_free_memory")) @@ -74,7 +74,7 @@ async def test_sensors( assert (state := hass.states.get("sensor.wled_rgb_light_wi_fi_signal")) assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == PERCENTAGE - assert state.state == "76" + assert state.state == "100" assert entry.entity_category is EntityCategory.DIAGNOSTIC assert (entry := entity_registry.async_get("sensor.wled_rgb_light_wi_fi_signal")) @@ -87,7 +87,7 @@ async def test_sensors( state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == SIGNAL_STRENGTH_DECIBELS_MILLIWATT ) - assert state.state == "-62" + assert state.state == "-43" assert (entry := entity_registry.async_get("sensor.wled_rgb_light_wi_fi_rssi")) assert entry.unique_id == "aabbccddeeff_wifi_rssi" diff --git a/tests/components/wled/test_switch.py b/tests/components/wled/test_switch.py index 6dfd62e363f..48331ffa9cc 100644 --- a/tests/components/wled/test_switch.py +++ b/tests/components/wled/test_switch.py @@ -1,6 +1,5 @@ """Tests for the WLED switch platform.""" -import json from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory @@ -9,7 +8,7 @@ from syrupy.assertion import SnapshotAssertion from wled import Device as WLEDDevice, WLEDConnectionError, WLEDError from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from 
homeassistant.components.wled.const import SCAN_INTERVAL +from homeassistant.components.wled.const import DOMAIN, SCAN_INTERVAL from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -22,7 +21,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import async_fire_time_changed, load_fixture +from tests.common import async_fire_time_changed, load_json_object_fixture pytestmark = pytest.mark.usefixtures("init_integration") @@ -144,8 +143,8 @@ async def test_switch_dynamically_handle_segments( # Test adding a segment dynamically... return_value = mock_wled.update.return_value - mock_wled.update.return_value = WLEDDevice( - json.loads(load_fixture("wled/rgb.json")) + mock_wled.update.return_value = WLEDDevice.from_dict( + load_json_object_fixture("rgb.json", DOMAIN) ) freezer.tick(SCAN_INTERVAL) diff --git a/tests/components/wled/test_update.py b/tests/components/wled/test_update.py index c576cdf16f9..a27aa918385 100644 --- a/tests/components/wled/test_update.py +++ b/tests/components/wled/test_update.py @@ -2,8 +2,9 @@ from unittest.mock import MagicMock +from freezegun.api import FrozenDateTimeFactory import pytest -from wled import WLEDError +from wled import Releases, WLEDError from homeassistant.components.update import ( ATTR_INSTALLED_VERSION, @@ -16,6 +17,7 @@ from homeassistant.components.update import ( UpdateDeviceClass, UpdateEntityFeature, ) +from homeassistant.components.wled.const import RELEASES_SCAN_INTERVAL from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -31,6 +33,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from tests.common import async_fire_time_changed + pytestmark = pytest.mark.usefixtures("init_integration") @@ -45,12 +49,12 @@ async def test_update_available( state.attributes[ATTR_ENTITY_PICTURE] == "https://brands.home-assistant.io/_/wled/icon.png" ) - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -64,15 +68,26 @@ async def test_update_available( assert entry.entity_category is EntityCategory.CONFIG -@pytest.mark.parametrize("device_fixture", ["rgb_no_update"]) async def test_update_information_available( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + entity_registry: er.EntityRegistry, + mock_wled_releases: MagicMock, ) -> None: """Test having no update information available at all.""" + mock_wled_releases.releases.return_value = Releases( + beta=None, + stable=None, + ) + + freezer.tick(RELEASES_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE assert state.state == STATE_UNKNOWN - assert state.attributes[ATTR_INSTALLED_VERSION] is None + assert 
state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" assert state.attributes[ATTR_LATEST_VERSION] is None assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert state.attributes[ATTR_RELEASE_URL] is None @@ -98,12 +113,12 @@ async def test_no_update_available( assert (state := hass.states.get("update.wled_websocket_firmware")) assert state.state == STATE_OFF assert state.attributes.get(ATTR_DEVICE_CLASS) == UpdateDeviceClass.FIRMWARE - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.12.0-b2" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0-b2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" assert state.attributes[ATTR_RELEASE_SUMMARY] is None assert ( state.attributes[ATTR_RELEASE_URL] - == "https://github.com/Aircoookie/WLED/releases/tag/v0.12.0-b2" + == "https://github.com/Aircoookie/WLED/releases/tag/v0.99.0" ) assert ( state.attributes[ATTR_SUPPORTED_FEATURES] @@ -151,8 +166,8 @@ async def test_update_stay_stable( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.5" - assert state.attributes[ATTR_LATEST_VERSION] == "0.12.0" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.14.4" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" await hass.services.async_call( UPDATE_DOMAIN, @@ -161,7 +176,7 @@ async def test_update_stay_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.12.0") + mock_wled.upgrade.assert_called_with(version="0.99.0") @pytest.mark.parametrize("device_fixture", ["rgbw"]) @@ -177,8 +192,8 @@ async def test_update_beta_to_stable( """ assert (state := hass.states.get("update.wled_rgbw_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b4" - assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6" + assert state.attributes[ATTR_INSTALLED_VERSION] == "0.99.0b1" + assert state.attributes[ATTR_LATEST_VERSION] == "0.99.0" await hass.services.async_call( UPDATE_DOMAIN, @@ -187,7 +202,7 @@ async def test_update_beta_to_stable( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.8.6") + mock_wled.upgrade.assert_called_with(version="0.99.0") @pytest.mark.parametrize("device_fixture", ["rgb_single_segment"]) @@ -202,8 +217,8 @@ async def test_update_stay_beta( """ assert (state := hass.states.get("update.wled_rgb_light_firmware")) assert state.state == STATE_ON - assert state.attributes[ATTR_INSTALLED_VERSION] == "0.8.6b1" - assert state.attributes[ATTR_LATEST_VERSION] == "0.8.6b2" + assert state.attributes[ATTR_INSTALLED_VERSION] == "1.0.0b4" + assert state.attributes[ATTR_LATEST_VERSION] == "1.0.0b5" await hass.services.async_call( UPDATE_DOMAIN, @@ -212,4 +227,4 @@ async def test_update_stay_beta( blocking=True, ) assert mock_wled.upgrade.call_count == 1 - mock_wled.upgrade.assert_called_with(version="0.8.6b2") + mock_wled.upgrade.assert_called_with(version="1.0.0b5") diff --git a/tests/components/workday/__init__.py b/tests/components/workday/__init__.py index a7e26765643..17449af8bd1 100644 --- a/tests/components/workday/__init__.py +++ b/tests/components/workday/__init__.py @@ -4,6 +4,8 @@ from __future__ import annotations from typing import Any +from holidays import OPTIONAL + from homeassistant.components.workday.const import ( DEFAULT_EXCLUDES, DEFAULT_NAME, @@ -310,3 +312,26 @@ 
TEST_LANGUAGE_NO_CHANGE = { "remove_holidays": ["2022-12-04", "2022-12-24,2022-12-26"], "language": "de", } +TEST_NO_OPTIONAL_CATEGORY = { + "name": DEFAULT_NAME, + "country": "CH", + "province": "FR", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": [], + "remove_holidays": [], + "language": "de", +} +TEST_OPTIONAL_CATEGORY = { + "name": DEFAULT_NAME, + "country": "CH", + "province": "FR", + "excludes": DEFAULT_EXCLUDES, + "days_offset": DEFAULT_OFFSET, + "workdays": DEFAULT_WORKDAYS, + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "category": [OPTIONAL], +} diff --git a/tests/components/workday/conftest.py b/tests/components/workday/conftest.py index 33bf98f90c3..081d6ce90db 100644 --- a/tests/components/workday/conftest.py +++ b/tests/components/workday/conftest.py @@ -1,9 +1,9 @@ """Fixtures for Workday integration tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/workday/test_binary_sensor.py b/tests/components/workday/test_binary_sensor.py index e973a9f9c28..a2718c00824 100644 --- a/tests/components/workday/test_binary_sensor.py +++ b/tests/components/workday/test_binary_sensor.py @@ -39,6 +39,8 @@ from . import ( TEST_CONFIG_YESTERDAY, TEST_LANGUAGE_CHANGE, TEST_LANGUAGE_NO_CHANGE, + TEST_NO_OPTIONAL_CATEGORY, + TEST_OPTIONAL_CATEGORY, init_integration, ) @@ -400,3 +402,23 @@ async def test_language_difference_no_change_other_language( """Test skipping if no difference in language naming.""" await init_integration(hass, TEST_LANGUAGE_NO_CHANGE) assert "Changing language from en to en_US" not in caplog.text + + +@pytest.mark.parametrize( + ("config", "end_state"), + [(TEST_OPTIONAL_CATEGORY, "off"), (TEST_NO_OPTIONAL_CATEGORY, "on")], +) +async def test_optional_category( + hass: HomeAssistant, + config: dict[str, Any], + end_state: str, + freezer: FrozenDateTimeFactory, +) -> None: + """Test setup from various configs.""" + # CH, subdiv FR has optional holiday Jan 2nd + freezer.move_to(datetime(2024, 1, 2, 12, tzinfo=UTC)) # Tuesday + await init_integration(hass, config) + + state = hass.states.get("binary_sensor.workday_sensor") + assert state is not None + assert state.state == end_state diff --git a/tests/components/workday/test_config_flow.py b/tests/components/workday/test_config_flow.py index 7eb3065e576..cc83cee93a2 100644 --- a/tests/components/workday/test_config_flow.py +++ b/tests/components/workday/test_config_flow.py @@ -5,11 +5,13 @@ from __future__ import annotations from datetime import datetime from freezegun.api import FrozenDateTimeFactory +from holidays import HALF_DAY, OPTIONAL import pytest from homeassistant import config_entries from homeassistant.components.workday.const import ( CONF_ADD_HOLIDAYS, + CONF_CATEGORY, CONF_EXCLUDES, CONF_OFFSET, CONF_REMOVE_HOLIDAYS, @@ -354,13 +356,14 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor", - "country": "DE", + "country": "CH", "excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], - "province": None, + "province": "FR", + "category": [OPTIONAL], }, entry_id="1", ) @@ -368,13 +371,14 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: hass, { "name": "Workday Sensor2", - "country": "DE", + "country": "CH", 
"excludes": ["sat", "sun", "holiday"], "days_offset": 0, "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": ["2023-03-28"], "remove_holidays": [], - "province": None, + "province": "FR", + "category": [OPTIONAL], }, entry_id="2", ) @@ -389,6 +393,8 @@ async def test_options_form_abort_duplicate(hass: HomeAssistant) -> None: "workdays": ["mon", "tue", "wed", "thu", "fri"], "add_holidays": [], "remove_holidays": [], + "province": "FR", + "category": [OPTIONAL], }, ) @@ -602,3 +608,48 @@ async def test_language( state = hass.states.get("binary_sensor.workday_sensor") assert state is not None assert state.state == "on" + + +async def test_form_with_categories(hass: HomeAssistant) -> None: + """Test optional categories.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: "Workday Sensor", + CONF_COUNTRY: "CH", + }, + ) + await hass.async_block_till_done() + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_EXCLUDES: DEFAULT_EXCLUDES, + CONF_OFFSET: DEFAULT_OFFSET, + CONF_WORKDAYS: DEFAULT_WORKDAYS, + CONF_ADD_HOLIDAYS: [], + CONF_REMOVE_HOLIDAYS: [], + CONF_LANGUAGE: "de", + CONF_CATEGORY: [HALF_DAY], + }, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "Workday Sensor" + assert result3["options"] == { + "name": "Workday Sensor", + "country": "CH", + "excludes": ["sat", "sun", "holiday"], + "days_offset": 0, + "workdays": ["mon", "tue", "wed", "thu", "fri"], + "add_holidays": [], + "remove_holidays": [], + "language": "de", + "category": ["half_day"], + } diff --git a/tests/components/worldclock/conftest.py b/tests/components/worldclock/conftest.py new file mode 100644 index 00000000000..74ed82f099a --- /dev/null +++ b/tests/components/worldclock/conftest.py @@ -0,0 +1,66 @@ +"""Fixtures for the Worldclock integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DEFAULT_TIME_STR_FORMAT, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically patch setup.""" + with patch( + "homeassistant.components.worldclock.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. 
+ + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Worldclock integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + title=DEFAULT_NAME, + source=SOURCE_USER, + options=get_config, + entry_id="1", + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/worldclock/test_config_flow.py b/tests/components/worldclock/test_config_flow.py new file mode 100644 index 00000000000..dfdb8159b9c --- /dev/null +++ b/tests/components/worldclock/test_config_flow.py @@ -0,0 +1,104 @@ +"""Test the Worldclock config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant import config_entries +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DEFAULT_TIME_STR_FORMAT, + DOMAIN, +) +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + assert len(hass.states.async_all()) == 1 + + state = hass.states.get("sensor.worldclock_sensor") + assert state is not None + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result 
= await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: DEFAULT_TIME_STR_FORMAT, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/worldclock/test_init.py b/tests/components/worldclock/test_init.py new file mode 100644 index 00000000000..5683836c166 --- /dev/null +++ b/tests/components/worldclock/test_init.py @@ -0,0 +1,17 @@ +"""Test Worldclock component setup process.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/worldclock/test_sensor.py b/tests/components/worldclock/test_sensor.py index 00195a49827..a8e3e41e649 100644 --- a/tests/components/worldclock/test_sensor.py +++ b/tests/components/worldclock/test_sensor.py @@ -1,19 +1,32 @@ """The test for the World clock sensor platform.""" +from datetime import tzinfo + import pytest -from homeassistant.core import HomeAssistant +from homeassistant.components.worldclock.const import ( + CONF_TIME_FORMAT, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.const import CONF_NAME, CONF_TIME_ZONE +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util +from tests.common import MockConfigEntry + @pytest.fixture -def time_zone(): +async def time_zone() -> tzinfo | None: """Fixture for time zone.""" - return dt_util.get_time_zone("America/New_York") + return await dt_util.async_get_time_zone("America/New_York") -async def test_time(hass: HomeAssistant, time_zone) -> None: +async def test_time_imported_from_yaml( + hass: HomeAssistant, time_zone: tzinfo | None, issue_registry: ir.IssueRegistry +) -> None: """Test the time at a different location.""" config = {"sensor": {"platform": "worldclock", "time_zone": "America/New_York"}} @@ -29,26 +42,42 @@ async def test_time(hass: HomeAssistant, time_zone) -> None: assert state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") - -async def test_time_format(hass: HomeAssistant, time_zone) -> None: - """Test time_format setting.""" - time_format = "%a, %b %d, %Y %I:%M %p" - config = { - "sensor": { - "platform": "worldclock", - "time_zone": "America/New_York", - "time_format": time_format, - } - } - - assert await async_setup_component( - hass, - "sensor", - config, + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" ) - await hass.async_block_till_done() + assert issue + assert issue.issue_domain == DOMAIN + + +async def test_time_from_config_entry( + hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry +) -> None: + """Test the time at a different location.""" state = hass.states.get("sensor.worldclock_sensor") assert state is not None - assert state.state == dt_util.now(time_zone=time_zone).strftime(time_format) + assert 
state.state == dt_util.now(time_zone=time_zone).strftime("%H:%M") + + +@pytest.mark.parametrize( + "get_config", + [ + { + CONF_NAME: DEFAULT_NAME, + CONF_TIME_ZONE: "America/New_York", + CONF_TIME_FORMAT: "%a, %b %d, %Y %I:%M %p", + } + ], +) +async def test_time_format( + hass: HomeAssistant, time_zone: tzinfo | None, loaded_entry: MockConfigEntry +) -> None: + """Test time_format setting.""" + + state = hass.states.get("sensor.worldclock_sensor") + assert state is not None + + assert state.state == dt_util.now(time_zone=time_zone).strftime( + "%a, %b %d, %Y %I:%M %p" + ) diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index 47ef0566dc6..f6093e34261 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -1,10 +1,10 @@ """Common fixtures for the Wyoming tests.""" +from collections.abc import Generator from pathlib import Path from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator from homeassistant.components import stt from homeassistant.components.wyoming import DOMAIN diff --git a/tests/components/wyoming/test_select.py b/tests/components/wyoming/test_select.py index e6ec2c4d432..2438d25b838 100644 --- a/tests/components/wyoming/test_select.py +++ b/tests/components/wyoming/test_select.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, patch from homeassistant.components import assist_pipeline from homeassistant.components.assist_pipeline.pipeline import PipelineData from homeassistant.components.assist_pipeline.select import OPTION_PREFERRED +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -140,3 +141,50 @@ async def test_noise_suppression_level_select( ) assert satellite_device.noise_suppression_level == 2 + + +async def test_vad_sensitivity_select( + hass: HomeAssistant, + satellite_config_entry: ConfigEntry, + satellite_device: SatelliteDevice, +) -> None: + """Test VAD sensitivity select.""" + vs_entity_id = satellite_device.get_vad_sensitivity_entity_id(hass) + assert vs_entity_id + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.DEFAULT + assert satellite_device.vad_sensitivity == VadSensitivity.DEFAULT + + # Change setting + with patch.object(satellite_device, "set_vad_sensitivity") as mock_vs_changed: + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.AGGRESSIVE.value}, + blocking=True, + ) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + # set function should have been called + mock_vs_changed.assert_called_once_with(VadSensitivity.AGGRESSIVE) + + # test restore + satellite_device = await reload_satellite(hass, satellite_config_entry.entry_id) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.RELAXED.value}, + blocking=True, + ) + + assert satellite_device.vad_sensitivity == VadSensitivity.RELAXED diff --git a/tests/components/xiaomi/test_device_tracker.py b/tests/components/xiaomi/test_device_tracker.py index 975e666af68..0f1c36d1fba 100644 --- 
a/tests/components/xiaomi/test_device_tracker.py +++ b/tests/components/xiaomi/test_device_tracker.py @@ -144,6 +144,7 @@ def mocked_requests(*args, **kwargs): 200, ) _LOGGER.debug("UNKNOWN ROUTE") + return None @patch( diff --git a/tests/components/xiaomi_ble/conftest.py b/tests/components/xiaomi_ble/conftest.py index bb74b3c7af3..8994aec813c 100644 --- a/tests/components/xiaomi_ble/conftest.py +++ b/tests/components/xiaomi_ble/conftest.py @@ -1,9 +1,9 @@ """Session fixtures.""" +from collections.abc import Generator from unittest import mock import pytest -from typing_extensions import Generator class MockServices: diff --git a/tests/components/xiaomi_ble/test_device_trigger.py b/tests/components/xiaomi_ble/test_device_trigger.py index 87a4d340d8c..218a382ada5 100644 --- a/tests/components/xiaomi_ble/test_device_trigger.py +++ b/tests/components/xiaomi_ble/test_device_trigger.py @@ -18,7 +18,6 @@ from tests.common import ( MockConfigEntry, async_capture_events, async_get_device_automations, - async_mock_service, ) from tests.components.bluetooth import inject_bluetooth_service_info_bleak @@ -29,12 +28,6 @@ def get_device_id(mac: str) -> tuple[str, str]: return (BLUETOOTH_DOMAIN, mac) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - async def _async_setup_xiaomi_device( hass: HomeAssistant, mac: str, data: Any | None = None ): @@ -399,7 +392,9 @@ async def test_get_triggers_for_invalid_device_id( async def test_if_fires_on_button_press( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for button press event trigger firing.""" mac = "54:EF:44:E3:9C:BC" @@ -452,15 +447,17 @@ async def test_if_fires_on_button_press( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_double_button_long_press( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for button press event trigger firing.""" mac = "DC:ED:83:87:12:73" @@ -513,15 +510,17 @@ async def test_if_fires_on_double_button_long_press( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_right_button_press" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "test_trigger_right_button_press" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() async def test_if_fires_on_motion_detected( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for motion event trigger firing.""" mac = "DE:70:E8:B2:39:0C" @@ -567,8 +566,8 @@ async def test_if_fires_on_motion_detected( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "test_trigger_motion_detected" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == 
"test_trigger_motion_detected" assert await hass.config_entries.async_unload(entry.entry_id) await hass.async_block_till_done() @@ -676,7 +675,9 @@ async def test_automation_with_invalid_trigger_event_property( async def test_triggers_for_invalid__model( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], ) -> None: """Test invalid model doesn't return triggers.""" mac = "DE:70:E8:B2:39:0C" diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 462145d16ab..54646d30513 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -1,12 +1,12 @@ """The tests for the Xiaomi vacuum platform.""" +from collections.abc import Generator from datetime import datetime, time, timedelta from unittest import mock from unittest.mock import MagicMock, patch from miio import DeviceException import pytest -from typing_extensions import Generator from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 02246e69269..804b800aaef 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -46,11 +46,27 @@ def main_zone_fixture(): def device_fixture(main_zone): """Mock the yamaha device.""" device = FakeYamahaDevice("http://receiver", "Receiver", zones=[main_zone]) - with patch("rxv.RXV", return_value=device): + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): yield device -async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: +@pytest.fixture(name="device2") +def device2_fixture(main_zone): + """Mock the yamaha device.""" + device = FakeYamahaDevice( + "http://127.0.0.1:80/YamahaRemoteControl/ctrl", "Receiver 2", zones=[main_zone] + ) + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): + yield device + + +async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> None: """Test set up integration with host.""" assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -60,6 +76,28 @@ async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: assert state is not None assert state.state == "off" + with patch("rxv.find", return_value=[device2]): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + + +async def test_setup_attribute_error(hass: HomeAssistant, device, main_zone) -> None: + """Test set up integration encountering an Attribute Error.""" + + with patch("rxv.find", side_effect=AttributeError): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> None: """Test set up integration without host.""" diff --git a/tests/components/yamaha_musiccast/test_config_flow.py b/tests/components/yamaha_musiccast/test_config_flow.py index 321e7250e5a..7629d2401c2 100644 --- 
a/tests/components/yamaha_musiccast/test_config_flow.py +++ b/tests/components/yamaha_musiccast/test_config_flow.py @@ -1,5 +1,6 @@ """Test config flow.""" +from collections.abc import Generator from unittest.mock import patch from aiomusiccast import MusicCastConnectionException @@ -17,7 +18,7 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -async def silent_ssdp_scanner(hass): +def silent_ssdp_scanner() -> Generator[None]: """Start SSDP component and get Scanner, prevent actual SSDP traffic.""" with ( patch("homeassistant.components.ssdp.Scanner._async_start_ssdp_listeners"), diff --git a/tests/components/yardian/conftest.py b/tests/components/yardian/conftest.py index 26a01f889b7..00e76c4c34f 100644 --- a/tests/components/yardian/conftest.py +++ b/tests/components/yardian/conftest.py @@ -1,9 +1,9 @@ """Common fixtures for the Yardian tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch import pytest -from typing_extensions import Generator @pytest.fixture diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index f6aa9a28ac0..6b48b32fd62 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -1,6 +1,5 @@ """The tests for YoLink device triggers.""" -import pytest from pytest_unordered import unordered from yolink.const import ATTR_DEVICE_DIMMER, ATTR_DEVICE_SMART_REMOTER @@ -11,17 +10,7 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import ( - MockConfigEntry, - async_get_device_automations, - async_mock_service, -) - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "yolink", "automation") +from tests.common import MockConfigEntry, async_get_device_automations async def test_get_triggers( @@ -120,7 +109,9 @@ async def test_get_triggers_exception( async def test_if_fires_on_event( - hass: HomeAssistant, calls: list[ServiceCall], device_registry: dr.DeviceRegistry + hass: HomeAssistant, + service_calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, ) -> None: """Test for event triggers firing.""" mac_address = "12:34:56:AB:CD:EF" @@ -166,5 +157,5 @@ async def test_if_fires_on_event( }, ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" diff --git a/tests/components/youtube/__init__.py b/tests/components/youtube/__init__.py index 1b559f0f1c4..31125d3a71e 100644 --- a/tests/components/youtube/__init__.py +++ b/tests/components/youtube/__init__.py @@ -1,8 +1,8 @@ """Tests for the YouTube integration.""" +from collections.abc import AsyncGenerator import json -from typing_extensions import AsyncGenerator from youtubeaio.models import YouTubeChannel, YouTubePlaylistItem, YouTubeSubscription from youtubeaio.types import AuthScope diff --git a/tests/components/zamg/conftest.py b/tests/components/zamg/conftest.py index 1795baa7fad..9fa4f333ef8 100644 --- a/tests/components/zamg/conftest.py +++ b/tests/components/zamg/conftest.py @@ -1,10 +1,10 @@ """Fixtures for Zamg integration tests.""" +from collections.abc import Generator import json from unittest.mock import MagicMock, patch import pytest -from 
typing_extensions import Generator from zamg import ZamgData as ZamgDevice from homeassistant.components.zamg.const import CONF_STATION_ID, DOMAIN diff --git a/tests/components/zeversolar/snapshots/test_diagnostics.ambr b/tests/components/zeversolar/snapshots/test_diagnostics.ambr index eebc8468076..4090a3262ba 100644 --- a/tests/components/zeversolar/snapshots/test_diagnostics.ambr +++ b/tests/components/zeversolar/snapshots/test_diagnostics.ambr @@ -10,6 +10,7 @@ # name: test_entry_diagnostics dict({ 'communication_status': 'OK', + 'energy_today': 123.4, 'hardware_version': 'M10', 'meter_status': 'OK', 'num_inverters': 1, diff --git a/tests/components/zha/common.py b/tests/components/zha/common.py index a8bec33a23a..1dd1e5f81aa 100644 --- a/tests/components/zha/common.py +++ b/tests/components/zha/common.py @@ -1,19 +1,12 @@ """Common test objects.""" -import asyncio from datetime import timedelta -import math -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock import zigpy.zcl import zigpy.zcl.foundation as zcl_f -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.helpers import ( - async_get_zha_config_value, - get_zha_gateway, -) +from homeassistant.components.zha.helpers import ZHADeviceProxy from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util @@ -98,7 +91,7 @@ def make_attribute(attrid, value, status=0): return attr -def send_attribute_report(hass, cluster, attrid, value): +def send_attribute_report(hass: HomeAssistant, cluster, attrid, value): """Send a single attribute report.""" return send_attributes_report(hass, cluster, {attrid: value}) @@ -131,7 +124,9 @@ async def send_attributes_report( await hass.async_block_till_done() -def find_entity_id(domain, zha_device, hass, qualifier=None): +def find_entity_id( + domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant, qualifier=None +) -> str | None: """Find the entity id under the testing. This is used to get the entity id in order to get the state from the state @@ -144,11 +139,13 @@ def find_entity_id(domain, zha_device, hass, qualifier=None): for entity_id in entities: if qualifier in entity_id: return entity_id - else: - return entities[0] + return None + return entities[0] -def find_entity_ids(domain, zha_device, hass): +def find_entity_ids( + domain: str, zha_device: ZHADeviceProxy, hass: HomeAssistant +) -> list[str]: """Find the entity ids under the testing. 
This is used to get the entity id in order to get the state from the state @@ -163,7 +160,7 @@ def find_entity_ids(domain, zha_device, hass): ] -def async_find_group_entity_id(hass, domain, group): +def async_find_group_entity_id(hass: HomeAssistant, domain, group): """Find the group entity id under test.""" entity_id = f"{domain}.coordinator_manufacturer_coordinator_model_{group.name.lower().replace(' ', '_')}" @@ -172,13 +169,6 @@ def async_find_group_entity_id(hass, domain, group): return entity_id -async def async_enable_traffic(hass, zha_devices, enabled=True): - """Allow traffic to flow through the gateway and the ZHA device.""" - for zha_device in zha_devices: - zha_device.update_available(enabled) - await hass.async_block_till_done() - - def make_zcl_header( command_id: int, global_command: bool = True, tsn: int = 1 ) -> zcl_f.ZCLHeader: @@ -199,57 +189,8 @@ def reset_clusters(clusters): cluster.write_attributes.reset_mock() -async def async_test_rejoin(hass, zigpy_device, clusters, report_counts, ep_id=1): - """Test device rejoins.""" - reset_clusters(clusters) - - zha_gateway = get_zha_gateway(hass) - await zha_gateway.async_device_initialized(zigpy_device) - await hass.async_block_till_done() - for cluster, reports in zip(clusters, report_counts, strict=False): - assert cluster.bind.call_count == 1 - assert cluster.bind.await_count == 1 - if reports: - assert cluster.configure_reporting.call_count == 0 - assert cluster.configure_reporting.await_count == 0 - assert cluster.configure_reporting_multiple.call_count == math.ceil( - reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ - ) - assert cluster.configure_reporting_multiple.await_count == math.ceil( - reports / zha_const.REPORT_CONFIG_ATTR_PER_REQ - ) - else: - # no reports at all - assert cluster.configure_reporting.call_count == reports - assert cluster.configure_reporting.await_count == reports - assert cluster.configure_reporting_multiple.call_count == reports - assert cluster.configure_reporting_multiple.await_count == reports - - -async def async_wait_for_updates(hass): - """Wait until all scheduled updates are executed.""" - await hass.async_block_till_done() - await asyncio.sleep(0) - await asyncio.sleep(0) - await hass.async_block_till_done() - - -async def async_shift_time(hass): +async def async_shift_time(hass: HomeAssistant): """Shift time to cause call later tasks to run.""" next_update = dt_util.utcnow() + timedelta(seconds=11) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - - -def patch_zha_config(component: str, overrides: dict[tuple[str, str], Any]): - """Patch the ZHA custom configuration defaults.""" - - def new_get_config(config_entry, section, config_key, default): - if (section, config_key) in overrides: - return overrides[section, config_key] - return async_get_zha_config_value(config_entry, section, config_key, default) - - return patch( - f"homeassistant.components.zha.{component}.async_get_zha_config_value", - side_effect=new_get_config, - ) diff --git a/tests/components/zha/conftest.py b/tests/components/zha/conftest.py index 410eaceda76..a9f4c51d75d 100644 --- a/tests/components/zha/conftest.py +++ b/tests/components/zha/conftest.py @@ -1,6 +1,6 @@ """Test configuration for the ZHA component.""" -from collections.abc import Callable +from collections.abc import Generator import itertools import time from typing import Any @@ -8,7 +8,6 @@ from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import warnings import pytest -from typing_extensions import 
Generator import zigpy from zigpy.application import ControllerApplication import zigpy.backups @@ -25,14 +24,9 @@ from zigpy.zcl.clusters.general import Basic, Groups from zigpy.zcl.foundation import Status import zigpy.zdo.types as zdo_t -import homeassistant.components.zha.core.const as zha_const -import homeassistant.components.zha.core.device as zha_core_device -from homeassistant.components.zha.core.gateway import ZHAGateway -from homeassistant.components.zha.core.helpers import get_zha_gateway +import homeassistant.components.zha.const as zha_const from homeassistant.core import HomeAssistant -from homeassistant.helpers import restore_state from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util from .common import patch_cluster as common_patch_cluster @@ -44,17 +38,6 @@ FIXTURE_GRP_NAME = "fixture group" COUNTER_NAMES = ["counter_1", "counter_2", "counter_3"] -@pytest.fixture(scope="module", autouse=True) -def disable_request_retry_delay(): - """Disable ZHA request retrying delay to speed up failures.""" - - with patch( - "homeassistant.components.zha.core.cluster_handlers.RETRYABLE_REQUEST_DECORATOR", - zigpy.util.retryable_request(tries=3, delay=0), - ): - yield - - @pytest.fixture(scope="module", autouse=True) def globally_load_quirks(): """Load quirks automatically so that ZHA tests run deterministically in isolation. @@ -128,6 +111,9 @@ class _FakeApp(ControllerApplication): ) -> None: pass + def _persist_coordinator_model_strings_in_db(self) -> None: + pass + def _wrap_mock_instance(obj: Any) -> MagicMock: """Auto-mock every attribute and method in an object.""" @@ -168,6 +154,8 @@ async def zigpy_app_controller(): app.state.node_info.nwk = 0x0000 app.state.node_info.ieee = zigpy.types.EUI64.convert("00:15:8d:00:02:32:4f:32") + app.state.node_info.manufacturer = "Coordinator Manufacturer" + app.state.node_info.model = "Coordinator Model" app.state.network_info.pan_id = 0x1234 app.state.network_info.extended_pan_id = app.state.node_info.ieee app.state.network_info.channel = 15 @@ -202,10 +190,14 @@ async def zigpy_app_controller(): async def config_entry_fixture() -> MockConfigEntry: """Fixture representing a config entry.""" return MockConfigEntry( - version=3, + version=4, domain=zha_const.DOMAIN, data={ - zigpy.config.CONF_DEVICE: {zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0"}, + zigpy.config.CONF_DEVICE: { + zigpy.config.CONF_DEVICE_PATH: "/dev/ttyUSB0", + zigpy.config.CONF_DEVICE_BAUDRATE: 115200, + zigpy.config.CONF_DEVICE_FLOW_CONTROL: "hardware", + }, zha_const.CONF_RADIO_TYPE: "ezsp", }, options={ @@ -280,170 +272,6 @@ def cluster_handler(): return cluster_handler -@pytest.fixture -def zigpy_device_mock(zigpy_app_controller): - """Make a fake device using the specified cluster classes.""" - - def _mock_dev( - endpoints, - ieee="00:0d:6f:00:0a:90:69:e7", - manufacturer="FakeManufacturer", - model="FakeModel", - node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - nwk=0xB79C, - patch_cluster=True, - quirk=None, - attributes=None, - ): - """Make a fake device using the specified cluster classes.""" - device = zigpy.device.Device( - zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk - ) - device.manufacturer = manufacturer - device.model = model - device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] - device.last_seen = time.time() - - for epid, ep in endpoints.items(): - endpoint = device.add_endpoint(epid) - endpoint.device_type = ep[SIG_EP_TYPE] - endpoint.profile_id = ep.get(SIG_EP_PROFILE, 
0x0104) - endpoint.request = AsyncMock() - - for cluster_id in ep.get(SIG_EP_INPUT, []): - endpoint.add_input_cluster(cluster_id) - - for cluster_id in ep.get(SIG_EP_OUTPUT, []): - endpoint.add_output_cluster(cluster_id) - - device.status = zigpy.device.Status.ENDPOINTS_INIT - - if quirk: - device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) - else: - # Allow zigpy to apply quirks if we don't pass one explicitly - device = zigpy.quirks.get_device(device) - - if patch_cluster: - for endpoint in (ep for epid, ep in device.endpoints.items() if epid): - endpoint.request = AsyncMock(return_value=[0]) - for cluster in itertools.chain( - endpoint.in_clusters.values(), endpoint.out_clusters.values() - ): - common_patch_cluster(cluster) - - if attributes is not None: - for ep_id, clusters in attributes.items(): - for cluster_name, attrs in clusters.items(): - cluster = getattr(device.endpoints[ep_id], cluster_name) - - for name, value in attrs.items(): - attr_id = cluster.find_attribute(name).id - cluster._attr_cache[attr_id] = value - - return device - - return _mock_dev - - -@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) -@pytest.fixture -def zha_device_joined(hass, setup_zha): - """Return a newly joined ZHA device.""" - setup_zha_fixture = setup_zha - - async def _zha_device(zigpy_dev, *, setup_zha: bool = True): - zigpy_dev.last_seen = time.time() - - if setup_zha: - await setup_zha_fixture() - - zha_gateway = get_zha_gateway(hass) - zha_gateway.application_controller.devices[zigpy_dev.ieee] = zigpy_dev - await zha_gateway.async_device_initialized(zigpy_dev) - await hass.async_block_till_done() - return zha_gateway.get_device(zigpy_dev.ieee) - - return _zha_device - - -@patch("homeassistant.components.zha.setup_quirks", MagicMock(return_value=True)) -@pytest.fixture -def zha_device_restored(hass, zigpy_app_controller, setup_zha): - """Return a restored ZHA device.""" - setup_zha_fixture = setup_zha - - async def _zha_device(zigpy_dev, *, last_seen=None, setup_zha: bool = True): - zigpy_app_controller.devices[zigpy_dev.ieee] = zigpy_dev - - if last_seen is not None: - zigpy_dev.last_seen = last_seen - - if setup_zha: - await setup_zha_fixture() - - zha_gateway = get_zha_gateway(hass) - return zha_gateway.get_device(zigpy_dev.ieee) - - return _zha_device - - -@pytest.fixture(params=["zha_device_joined", "zha_device_restored"]) -def zha_device_joined_restored(request: pytest.FixtureRequest): - """Join or restore ZHA device.""" - named_method = request.getfixturevalue(request.param) - named_method.name = request.param - return named_method - - -@pytest.fixture -def zha_device_mock( - hass: HomeAssistant, config_entry, zigpy_device_mock -) -> Callable[..., zha_core_device.ZHADevice]: - """Return a ZHA Device factory.""" - - def _zha_device( - endpoints=None, - ieee="00:11:22:33:44:55:66:77", - manufacturer="mock manufacturer", - model="mock model", - node_desc=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - patch_cluster=True, - ) -> zha_core_device.ZHADevice: - if endpoints is None: - endpoints = { - 1: { - "in_clusters": [0, 1, 8, 768], - "out_clusters": [0x19], - "device_type": 0x0105, - }, - 2: { - "in_clusters": [0], - "out_clusters": [6, 8, 0x19, 768], - "device_type": 0x0810, - }, - } - zigpy_device = zigpy_device_mock( - endpoints, ieee, manufacturer, model, node_desc, patch_cluster=patch_cluster - ) - return zha_core_device.ZHADevice( - hass, - zigpy_device, - ZHAGateway(hass, {}, config_entry), - ) - - return _zha_device - - -@pytest.fixture 
-def hass_disable_services(hass): - """Mock services.""" - with patch.object( - hass, "services", MagicMock(has_service=MagicMock(return_value=True)) - ): - yield hass - - @pytest.fixture(autouse=True) def speed_up_radio_mgr(): """Speed up the radio manager connection time by removing delays.""" @@ -523,31 +351,66 @@ def network_backup() -> zigpy.backups.NetworkBackup: @pytest.fixture -def core_rs(hass_storage: dict[str, Any]) -> Callable[[str, Any, dict[str, Any]], None]: - """Core.restore_state fixture.""" +def zigpy_device_mock(zigpy_app_controller): + """Make a fake device using the specified cluster classes.""" - def _storage(entity_id: str, state: str, attributes: dict[str, Any]) -> None: - now = dt_util.utcnow().isoformat() + def _mock_dev( + endpoints, + ieee="00:0d:6f:00:0a:90:69:e7", + manufacturer="FakeManufacturer", + model="FakeModel", + node_descriptor=b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", + nwk=0xB79C, + patch_cluster=True, + quirk=None, + attributes=None, + ): + """Make a fake device using the specified cluster classes.""" + device = zigpy.device.Device( + zigpy_app_controller, zigpy.types.EUI64.convert(ieee), nwk + ) + device.manufacturer = manufacturer + device.model = model + device.node_desc = zdo_t.NodeDescriptor.deserialize(node_descriptor)[0] + device.last_seen = time.time() - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "attributes": attributes, - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], - } + for epid, ep in endpoints.items(): + endpoint = device.add_endpoint(epid) + endpoint.device_type = ep[SIG_EP_TYPE] + endpoint.profile_id = ep.get(SIG_EP_PROFILE, 0x0104) + endpoint.request = AsyncMock() - return _storage + for cluster_id in ep.get(SIG_EP_INPUT, []): + endpoint.add_input_cluster(cluster_id) + + for cluster_id in ep.get(SIG_EP_OUTPUT, []): + endpoint.add_output_cluster(cluster_id) + + device.status = zigpy.device.Status.ENDPOINTS_INIT + + if quirk: + device = quirk(zigpy_app_controller, device.ieee, device.nwk, device) + else: + # Allow zigpy to apply quirks if we don't pass one explicitly + device = zigpy.quirks.get_device(device) + + if patch_cluster: + for endpoint in (ep for epid, ep in device.endpoints.items() if epid): + endpoint.request = AsyncMock(return_value=[0]) + for cluster in itertools.chain( + endpoint.in_clusters.values(), endpoint.out_clusters.values() + ): + common_patch_cluster(cluster) + + if attributes is not None: + for ep_id, clusters in attributes.items(): + for cluster_name, attrs in clusters.items(): + cluster = getattr(device.endpoints[ep_id], cluster_name) + + for name, value in attrs.items(): + attr_id = cluster.find_attribute(name).id + cluster._attr_cache[attr_id] = value + + return device + + return _mock_dev diff --git a/tests/components/zha/test_alarm_control_panel.py b/tests/components/zha/test_alarm_control_panel.py index 8d3bd76ef61..3473a9b00ad 100644 --- a/tests/components/zha/test_alarm_control_panel.py +++ b/tests/components/zha/test_alarm_control_panel.py @@ -4,10 +4,17 @@ from unittest.mock import AsyncMock, call, patch, sentinel import pytest from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import security import zigpy.zcl.foundation as zcl_f from homeassistant.components.alarm_control_panel import DOMAIN 
as ALARM_DOMAIN +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( ATTR_ENTITY_ID, STATE_ALARM_ARMED_AWAY, @@ -15,12 +22,11 @@ from homeassistant.const import ( STATE_ALARM_ARMED_NIGHT, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant -from .common import async_enable_traffic, find_entity_id +from .common import find_entity_id from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -39,44 +45,40 @@ def alarm_control_panel_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - @patch( "zigpy.zcl.clusters.security.IasAce.client_command", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) async def test_alarm_control_panel( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA alarm control panel platform.""" - zha_device = await zha_device_joined_restored(zigpy_device) - cluster = zigpy_device.endpoints.get(1).ias_ace - entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].ias_ace assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the panel was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to STATE_ALARM_DISARMED assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED # arm_away from HA @@ -255,8 +257,30 @@ async def test_alarm_control_panel( # reset the panel await reset_alarm_panel(hass, cluster, entity_id) + await hass.services.async_call( + ALARM_DOMAIN, + "alarm_trigger", + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + await hass.async_block_till_done() + assert hass.states.get(entity_id).state == STATE_ALARM_TRIGGERED + assert cluster.client_command.call_count == 1 + assert cluster.client_command.await_count == 1 + assert cluster.client_command.call_args == call( + 4, + security.IasAce.PanelStatus.In_Alarm, + 0, + security.IasAce.AudibleNotification.Default_Sound, + 
security.IasAce.AlarmStatus.Emergency_Panic, + ) -async def reset_alarm_panel(hass, cluster, entity_id): + # reset the panel + await reset_alarm_panel(hass, cluster, entity_id) + cluster.client_command.reset_mock() + + +async def reset_alarm_panel(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Reset the state of the alarm panel.""" cluster.client_command.reset_mock() await hass.services.async_call( diff --git a/tests/components/zha/test_api.py b/tests/components/zha/test_api.py index ed3394aafba..7aff6d81f5d 100644 --- a/tests/components/zha/test_api.py +++ b/tests/components/zha/test_api.py @@ -6,12 +6,12 @@ from typing import TYPE_CHECKING from unittest.mock import AsyncMock, MagicMock, call, patch import pytest +from zha.application.const import RadioType import zigpy.backups import zigpy.state from homeassistant.components.zha import api -from homeassistant.components.zha.core.const import RadioType -from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.helpers import get_zha_gateway_proxy from homeassistant.core import HomeAssistant if TYPE_CHECKING: @@ -41,7 +41,7 @@ async def test_async_get_network_settings_inactive( """Test reading settings with an inactive ZHA installation.""" await setup_zha() - gateway = get_zha_gateway(hass) + gateway = get_zha_gateway_proxy(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) backup = zigpy.backups.NetworkBackup() @@ -53,7 +53,7 @@ async def test_async_get_network_settings_inactive( controller.new = AsyncMock(return_value=zigpy_app_controller) with patch.dict( - "homeassistant.components.zha.core.const.RadioType._member_map_", + "homeassistant.components.zha.api.RadioType._member_map_", ezsp=MagicMock(controller=controller, description="EZSP"), ): settings = await api.async_get_network_settings(hass) @@ -68,7 +68,7 @@ async def test_async_get_network_settings_missing( """Test reading settings with an inactive ZHA installation, no valid channel.""" await setup_zha() - gateway = get_zha_gateway(hass) + gateway = get_zha_gateway_proxy(hass) await hass.config_entries.async_unload(gateway.config_entry.entry_id) # Network settings were never loaded for whatever reason diff --git a/tests/components/zha/test_base.py b/tests/components/zha/test_base.py deleted file mode 100644 index 203df2ffda5..00000000000 --- a/tests/components/zha/test_base.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Test ZHA base cluster handlers module.""" - -from homeassistant.components.zha.core.cluster_handlers import parse_and_log_command - -from .test_cluster_handlers import ( # noqa: F401 - endpoint, - poll_control_ch, - zigpy_coordinator_device, -) - - -def test_parse_and_log_command(poll_control_ch) -> None: # noqa: F811 - """Test that `parse_and_log_command` correctly parses a known command.""" - assert parse_and_log_command(poll_control_ch, 0x00, 0x01, []) == "fast_poll_stop" - - -def test_parse_and_log_command_unknown(poll_control_ch) -> None: # noqa: F811 - """Test that `parse_and_log_command` correctly parses an unknown command.""" - assert parse_and_log_command(poll_control_ch, 0x00, 0xAB, []) == "0xAB" diff --git a/tests/components/zha/test_binary_sensor.py b/tests/components/zha/test_binary_sensor.py index 8276223926d..419823b3b52 100644 --- a/tests/components/zha/test_binary_sensor.py +++ b/tests/components/zha/test_binary_sensor.py @@ -1,54 +1,25 @@ """Test ZHA binary sensor.""" -from collections.abc import Callable -from typing import Any from unittest.mock import patch import pytest 
-import zigpy.profiles.zha -from zigpy.zcl.clusters import general, measurement, security +from zigpy.profiles import zha +from zigpy.zcl.clusters import general -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import async_mock_load_restore_state_from_storage - -DEVICE_IAS = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ZONE, - SIG_EP_INPUT: [security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - } -} - - -DEVICE_OCCUPANCY = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, - SIG_EP_INPUT: [measurement.OccupancySensing.cluster_id], - SIG_EP_OUTPUT: [], - } -} - - -DEVICE_ONOFF = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SENSOR, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - } -} +ON = 1 +OFF = 0 @pytest.fixture(autouse=True) @@ -58,121 +29,51 @@ def binary_sensor_platform_only(): "homeassistant.components.zha.PLATFORMS", ( Platform.BINARY_SENSOR, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, + Platform.SENSOR, ), ): yield -async def async_test_binary_sensor_on_off(hass, cluster, entity_id): - """Test getting on and off messages for binary sensors.""" - # binary sensor on - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) - assert hass.states.get(entity_id).state == STATE_ON - - # binary sensor off - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) - assert hass.states.get(entity_id).state == STATE_OFF - - -async def async_test_iaszone_on_off(hass, cluster, entity_id): - """Test getting on and off messages for iaszone binary sensors.""" - # binary sensor on - cluster.listener_event("cluster_command", 1, 0, [1]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_ON - - # binary sensor off - cluster.listener_event("cluster_command", 1, 0, [0]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF - - # check that binary sensor remains off when non-alarm bits change - cluster.listener_event("cluster_command", 1, 0, [0b1111111100]) - await hass.async_block_till_done() - assert hass.states.get(entity_id).state == STATE_OFF - - -@pytest.mark.parametrize( - ("device", "on_off_test", "cluster_name", "reporting", "name"), - [ - ( - DEVICE_IAS, - async_test_iaszone_on_off, - "ias_zone", - (0,), - "FakeManufacturer FakeModel IAS zone", - ), - ( - DEVICE_OCCUPANCY, - async_test_binary_sensor_on_off, - "occupancy", - (1,), - "FakeManufacturer FakeModel Occupancy", - ), - ], -) async def test_binary_sensor( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, - device, - on_off_test, - cluster_name, - reporting, - name, ) -> None: """Test ZHA binary_sensor platform.""" - zigpy_device = zigpy_device_mock(device) - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = 
find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, + SIG_EP_INPUT: [general.Basic.cluster_id], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + ) + cluster = zigpy_device.endpoints[1].out_clusters[general.OnOff.cluster_id] + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device_proxy, hass) assert entity_id is not None - assert hass.states.get(entity_id).name == name - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the sensors exist and are in the unavailable state - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - await async_enable_traffic(hass, [zha_device]) - - # test that the sensors exist and are in the off state assert hass.states.get(entity_id).state == STATE_OFF - # test getting messages that trigger and reset the sensors - cluster = getattr(zigpy_device.endpoints[1], cluster_name) - await on_off_test(hass, cluster, entity_id) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} + ) + assert hass.states.get(entity_id).state == STATE_ON - # test rejoin - await async_test_rejoin(hass, zigpy_device, [cluster], reporting) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} + ) assert hass.states.get(entity_id).state == STATE_OFF - - -@pytest.mark.parametrize( - "restored_state", - [ - STATE_ON, - STATE_OFF, - ], -) -async def test_onoff_binary_sensor_restore_state( - hass: HomeAssistant, - zigpy_device_mock, - core_rs: Callable[[str, Any, dict[str, Any]], None], - zha_device_restored, - restored_state: str, -) -> None: - """Test ZHA OnOff binary_sensor restores last state from HA.""" - - entity_id = "binary_sensor.fakemanufacturer_fakemodel_opening" - core_rs(entity_id, state=restored_state, attributes={}) - await async_mock_load_restore_state_from_storage(hass) - - zigpy_device = zigpy_device_mock(DEVICE_ONOFF) - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass) - - assert entity_id is not None - assert hass.states.get(entity_id).state == restored_state diff --git a/tests/components/zha/test_button.py b/tests/components/zha/test_button.py index fdcc0d7271c..574805db5f6 100644 --- a/tests/components/zha/test_button.py +++ b/tests/components/zha/test_button.py @@ -1,29 +1,21 @@ """Test ZHA button.""" -from typing import Final -from unittest.mock import call, patch +from unittest.mock import patch from freezegun import freeze_time import pytest -from zhaquirks.const import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zhaquirks.tuya.ts0601_valve import ParksideTuyaValveManufCluster -from zigpy.const import SIG_EP_PROFILE -from zigpy.exceptions import ZigbeeException +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha -from zigpy.quirks import CustomCluster, CustomDevice -from 
zigpy.quirks.v2 import add_to_registry_v2 -import zigpy.types as t -from zigpy.zcl.clusters import general, security -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from homeassistant.components.button import DOMAIN, SERVICE_PRESS, ButtonDeviceClass +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -32,11 +24,9 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er -from .common import find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id @pytest.fixture(autouse=True) @@ -44,106 +34,53 @@ def button_platform_only(): """Only set up the button and required base platforms to speed up tests.""" with patch( "homeassistant.components.zha.PLATFORMS", - ( - Platform.BINARY_SENSOR, - Platform.BUTTON, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, - Platform.SENSOR, - Platform.SWITCH, - ), + (Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR), ): yield @pytest.fixture -async def contact_sensor( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Contact sensor fixture.""" +async def setup_zha_integration(hass: HomeAssistant, setup_zha): + """Set up ZHA component.""" - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - security.IasZone.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ZONE, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].identify - - -class FrostLockQuirk(CustomDevice): - """Quirk with frost lock attribute.""" - - class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): - """Tuya manufacturer specific cluster.""" - - cluster_id = 0xEF00 - ep_attribute = "tuya_manufacturer" - - attributes = {0xEF01: ("frost_lock_reset", t.Bool)} - - replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def tuya_water_valve( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Tuya Water Valve fixture.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - general.OnOff.cluster_id, - ParksideTuyaValveManufCluster.cluster_id, - ], - OUTPUT_CLUSTERS: [general.Time.cluster_id, general.Ota.cluster_id], - }, - }, - manufacturer="_TZE200_htnnfasr", - model="TS0601", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].tuya_manufacturer + # if we call this in the test itself the test hangs forever + await setup_zha() @freeze_time("2021-11-04 17:37:00", tz_offset=-1) async def test_button( - hass: HomeAssistant, entity_registry: er.EntityRegistry, contact_sensor + hass: 
HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha_integration, # pylint: disable=unused-argument + zigpy_device_mock, ) -> None: """Test ZHA button platform.""" - zha_device, cluster = contact_sensor - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass) + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SENSOR, + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + ) + cluster = zigpy_device.endpoints[1].identify + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.BUTTON, zha_device_proxy, hass) assert entity_id is not None state = hass.states.get(entity_id) @@ -175,198 +112,3 @@ async def test_button( assert state assert state.state == "2021-11-04T16:37:00+00:00" assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.IDENTIFY - - -async def test_frost_unlock( - hass: HomeAssistant, entity_registry: er.EntityRegistry, tuya_water_valve -) -> None: - """Test custom frost unlock ZHA button.""" - - zha_device, cluster = tuya_water_valve - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="frost_lock_reset") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART - - entry = entity_registry.async_get(entity_id) - assert entry - assert entry.entity_category == EntityCategory.CONFIG - - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert cluster.write_attributes.mock_calls == [ - call({"frost_lock_reset": 0}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.attributes[ATTR_DEVICE_CLASS] == ButtonDeviceClass.RESTART - - cluster.write_attributes.reset_mock() - cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - - # There are three retries - assert cluster.write_attributes.mock_calls == [ - call({"frost_lock_reset": 0}, manufacturer=None), - call({"frost_lock_reset": 0}, manufacturer=None), - call({"frost_lock_reset": 0}, manufacturer=None), - ] - - -class FakeManufacturerCluster(CustomCluster, ManufacturerSpecificCluster): - """Fake manufacturer cluster.""" - - cluster_id: Final = 0xFFF3 - ep_attribute: Final = "mfg_identify" - - class AttributeDefs(zcl_f.BaseAttributeDefs): - """Attribute definitions.""" - - feed: Final = zcl_f.ZCLAttributeDef( - id=0x0000, type=t.uint8_t, access="rw", is_manufacturer_specific=True - ) - - class ServerCommandDefs(zcl_f.BaseCommandDefs): - """Server command definitions.""" - - self_test: Final = zcl_f.ZCLCommandDef( - id=0x00, schema={"identify_time": t.uint16_t}, direction=False - ) - - -( - 
add_to_registry_v2("Fake_Model", "Fake_Manufacturer") - .replaces(FakeManufacturerCluster) - .command_button( - FakeManufacturerCluster.ServerCommandDefs.self_test.name, - FakeManufacturerCluster.cluster_id, - command_args=(5,), - ) - .write_attr_button( - FakeManufacturerCluster.AttributeDefs.feed.name, - 2, - FakeManufacturerCluster.cluster_id, - ) -) - - -@pytest.fixture -async def custom_button_device( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Button device fixture for quirks button tests.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - FakeManufacturerCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.REMOTE_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - manufacturer="Fake_Model", - model="Fake_Manufacturer", - ) - - zigpy_device.endpoints[1].mfg_identify.PLUGGED_ATTR_READS = { - FakeManufacturerCluster.AttributeDefs.feed.name: 0, - } - update_attribute_cache(zigpy_device.endpoints[1].mfg_identify) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].mfg_identify - - -@freeze_time("2021-11-04 17:37:00", tz_offset=-1) -async def test_quirks_command_button(hass: HomeAssistant, custom_button_device) -> None: - """Test ZHA button platform.""" - - zha_device, cluster = custom_button_device - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="self_test") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert len(cluster.request.mock_calls) == 1 - assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == 0 - assert cluster.request.call_args[0][3] == 5 # duration in seconds - - state = hass.states.get(entity_id) - assert state - assert state.state == "2021-11-04T16:37:00+00:00" - - -@freeze_time("2021-11-04 17:37:00", tz_offset=-1) -async def test_quirks_write_attr_button( - hass: HomeAssistant, custom_button_device -) -> None: - """Test ZHA button platform.""" - - zha_device, cluster = custom_button_device - assert cluster is not None - entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="feed") - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNKNOWN - assert cluster.get(cluster.AttributeDefs.feed.name) == 0 - - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - await hass.services.async_call( - DOMAIN, - SERVICE_PRESS, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - await hass.async_block_till_done() - assert cluster.write_attributes.mock_calls == [ - call({cluster.AttributeDefs.feed.name: 2}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.state == "2021-11-04T16:37:00+00:00" - assert cluster.get(cluster.AttributeDefs.feed.name) == 2 diff --git a/tests/components/zha/test_climate.py b/tests/components/zha/test_climate.py index 32ef08fcd96..7b94db51d04 100644 --- a/tests/components/zha/test_climate.py +++ b/tests/components/zha/test_climate.py @@ -1,17 +1,17 @@ """Test ZHA climate.""" from typing import Literal -from unittest.mock import call, patch +from 
unittest.mock import patch import pytest +from zha.application.platforms.climate.const import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION import zhaquirks.sinope.thermostat -from zhaquirks.sinope.thermostat import SinopeTechnologiesThermostatCluster import zhaquirks.tuya.ts0601_trv import zigpy.profiles +from zigpy.profiles import zha import zigpy.types import zigpy.zcl.clusters from zigpy.zcl.clusters.hvac import Thermostat -import zigpy.zcl.foundation as zcl_f from homeassistant.components.climate import ( ATTR_CURRENT_TEMPERATURE, @@ -28,10 +28,6 @@ from homeassistant.components.climate import ( FAN_LOW, FAN_ON, PRESET_AWAY, - PRESET_BOOST, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -39,13 +35,11 @@ from homeassistant.components.climate import ( HVACAction, HVACMode, ) -from homeassistant.components.zha.climate import HVAC_MODE_2_SYSTEM, SEQ_OF_OPERATION -from homeassistant.components.zha.core.const import ( - PRESET_COMPLEX, - PRESET_SCHEDULE, - PRESET_TEMP_MANUAL, +from homeassistant.components.zha.helpers import ( + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.components.zha.core.device import ZHADevice from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -53,15 +47,15 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError -from .common import async_enable_traffic, find_entity_id, send_attributes_report +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE CLIMATE = { 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -74,8 +68,8 @@ CLIMATE = { CLIMATE_FAN = { 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.THERMOSTAT, SIG_EP_INPUT: [ zigpy.zcl.clusters.general.Basic.cluster_id, zigpy.zcl.clusters.general.Identify.cluster_id, @@ -108,72 +102,7 @@ CLIMATE_SINOPE = { }, } -CLIMATE_ZEN = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.hvac.Fan.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - -CLIMATE_MOES = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - -CLIMATE_BECA = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SMART_PLUG, - SIG_EP_INPUT: [ - 
zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.Scenes.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Time.cluster_id, - zigpy.zcl.clusters.general.Ota.cluster_id, - ], - } -} - -CLIMATE_ZONNSMART = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - zigpy.zcl.clusters.hvac.UserInterface.cluster_id, - 61148, - ], - SIG_EP_OUTPUT: [zigpy.zcl.clusters.general.Ota.cluster_id], - } -} - MANUF_SINOPE = "Sinope Technologies" -MANUF_ZEN = "Zen Within" -MANUF_MOES = "_TZE200_ckud7u2l" -MANUF_BECA = "_TZE200_b6wax7g0" -MANUF_ZONNSMART = "_TZE200_hue3yfsn" ZCL_ATTR_PLUG = { "abs_min_heat_setpoint_limit": 800, @@ -218,22 +147,22 @@ def climate_platform_only(): @pytest.fixture -def device_climate_mock(hass, zigpy_device_mock, zha_device_joined): +def device_climate_mock(hass: HomeAssistant, setup_zha, zigpy_device_mock): """Test regular thermostat device.""" async def _dev(clusters, plug=None, manuf=None, quirk=None): - if plug is None: - plugged_attrs = ZCL_ATTR_PLUG - else: - plugged_attrs = {**ZCL_ATTR_PLUG, **plug} - + plugged_attrs = ZCL_ATTR_PLUG if plug is None else {**ZCL_ATTR_PLUG, **plug} zigpy_device = zigpy_device_mock(clusters, manufacturer=manuf, quirk=quirk) zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 zigpy_device.endpoints[1].thermostat.PLUGGED_ATTR_READS = plugged_attrs - zha_device = await zha_device_joined(zigpy_device) - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - return zha_device + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + return gateway_proxy.get_device_proxy(zigpy_device.ieee) return _dev @@ -268,44 +197,6 @@ async def device_climate_sinope(device_climate_mock): ) -@pytest.fixture -async def device_climate_zen(device_climate_mock): - """Zen Within thermostat.""" - - return await device_climate_mock(CLIMATE_ZEN, manuf=MANUF_ZEN) - - -@pytest.fixture -async def device_climate_moes(device_climate_mock): - """MOES thermostat.""" - - return await device_climate_mock( - CLIMATE_MOES, manuf=MANUF_MOES, quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1 - ) - - -@pytest.fixture -async def device_climate_beca(device_climate_mock) -> ZHADevice: - """Beca thermostat.""" - - return await device_climate_mock( - CLIMATE_BECA, - manuf=MANUF_BECA, - quirk=zhaquirks.tuya.ts0601_trv.MoesHY368_Type1new, - ) - - -@pytest.fixture -async def device_climate_zonnsmart(device_climate_mock): - """ZONNSMART thermostat.""" - - return await device_climate_mock( - CLIMATE_ZONNSMART, - manuf=MANUF_ZONNSMART, - quirk=zhaquirks.tuya.ts0601_trv.ZonnsmartTV01_ZG, - ) - - def test_sequence_mappings() -> None: """Test correct mapping between control sequence -> HVAC Mode -> Sysmode.""" @@ -318,7 +209,7 @@ def test_sequence_mappings() -> None: async def test_climate_local_temperature(hass: HomeAssistant, device_climate) -> None: """Test local temperature.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, 
hass) state = hass.states.get(entity_id) @@ -334,7 +225,7 @@ async def test_climate_hvac_action_running_state( ) -> None: """Test hvac action via running state.""" - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat + thrm_cluster = device_climate_sinope.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) sensor_entity_id = find_entity_id( Platform.SENSOR, device_climate_sinope, hass, "hvac" @@ -394,101 +285,12 @@ async def test_climate_hvac_action_running_state( assert hvac_sensor_state.state == HVACAction.FAN -async def test_climate_hvac_action_running_state_zen( - hass: HomeAssistant, device_climate_zen -) -> None: - """Test Zen hvac action via running state.""" - - thrm_cluster = device_climate_zen.device.endpoints[1].thermostat - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zen, hass) - sensor_entity_id = find_entity_id( - Platform.SENSOR, device_climate_zen, hass, "hvac_action" - ) - - state = hass.states.get(entity_id) - assert ATTR_HVAC_ACTION not in state.attributes - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == "unknown" - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.COOLING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.HEATING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_2nd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Cool_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.COOLING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.COOLING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Fan_3rd_Stage_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.FAN - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.FAN - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Heat_State_On} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.HEATING - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.HEATING - - await send_attributes_report( - hass, thrm_cluster, {0x0029: Thermostat.RunningState.Idle} - ) - state = 
hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.OFF - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.OFF - - await send_attributes_report( - hass, thrm_cluster, {0x001C: Thermostat.SystemMode.Heat} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_HVAC_ACTION] == HVACAction.IDLE - hvac_sensor_state = hass.states.get(sensor_entity_id) - assert hvac_sensor_state.state == HVACAction.IDLE - - async def test_climate_hvac_action_pi_demand( hass: HomeAssistant, device_climate ) -> None: """Test hvac action based on pi_heating/cooling_demand attrs.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -537,7 +339,7 @@ async def test_hvac_mode( ) -> None: """Test HVAC mode.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -714,7 +516,7 @@ async def test_set_hvac_mode( ) -> None: """Test setting hvac mode.""" - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) state = hass.states.get(entity_id) @@ -753,134 +555,11 @@ async def test_set_hvac_mode( } -async def test_preset_setting(hass: HomeAssistant, device_climate_sinope) -> None: - """Test preset setting.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - # unsuccessful occupancy change - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [ - zcl_f.WriteAttributesStatusRecord( - status=zcl_f.Status.FAILURE, - attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, - ) - ] - ) - ] - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} - - # successful occupancy change - thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] - ) - ] - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 0} - - # unsuccessful occupancy change - thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [ - zcl_f.WriteAttributesStatusRecord( - status=zcl_f.Status.FAILURE, - 
attrid=SinopeTechnologiesThermostatCluster.AttributeDefs.set_occupancy.id, - ) - ] - ) - ] - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} - - # successful occupancy change - thrm_cluster.write_attributes.reset_mock() - thrm_cluster.write_attributes.return_value = [ - zcl_f.WriteAttributesResponse( - [zcl_f.WriteAttributesStatusRecord(status=zcl_f.Status.SUCCESS)] - ) - ] - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 1 - assert thrm_cluster.write_attributes.call_args[0][0] == {"set_occupancy": 1} - - -async def test_preset_setting_invalid( - hass: HomeAssistant, device_climate_sinope -) -> None: - """Test invalid preset setting.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - with pytest.raises(ServiceValidationError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "invalid_preset"}, - blocking=True, - ) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - assert thrm_cluster.write_attributes.call_count == 0 - - async def test_set_temperature_hvac_mode(hass: HomeAssistant, device_climate) -> None: """Test setting HVAC mode in temperature service call.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.OFF @@ -922,7 +601,7 @@ async def test_set_temperature_heat_cool( quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL @@ -1008,7 +687,7 @@ async def test_set_temperature_heat(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT @@ -1087,7 +766,7 @@ async def test_set_temperature_cool(hass: HomeAssistant, device_climate_mock) -> quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = 
hass.states.get(entity_id) assert state.state == HVACMode.COOL @@ -1172,7 +851,7 @@ async def test_set_temperature_wrong_mode( manuf=MANUF_SINOPE, ) entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass) - thrm_cluster = device_climate.device.endpoints[1].thermostat + thrm_cluster = device_climate.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert state.state == HVACMode.DRY @@ -1191,38 +870,11 @@ async def test_set_temperature_wrong_mode( assert thrm_cluster.write_attributes.await_count == 0 -async def test_occupancy_reset(hass: HomeAssistant, device_climate_sinope) -> None: - """Test away preset reset.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass) - thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - thrm_cluster.write_attributes.reset_mock() - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - - await send_attributes_report( - hass, thrm_cluster, {"occupied_heating_setpoint": zigpy.types.uint16_t(1950)} - ) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - async def test_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - thrm_cluster = device_climate_fan.device.endpoints[1].thermostat + thrm_cluster = device_climate_fan.device.device.endpoints[1].thermostat state = hass.states.get(entity_id) assert set(state.attributes[ATTR_FAN_MODES]) == {FAN_AUTO, FAN_ON} @@ -1253,7 +905,7 @@ async def test_set_fan_mode_not_supported( """Test fan setting unsupported mode.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = device_climate_fan.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.device.endpoints[1].fan with pytest.raises(ServiceValidationError): await hass.services.async_call( @@ -1269,7 +921,7 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: """Test fan mode setting.""" entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass) - fan_cluster = device_climate_fan.device.endpoints[1].fan + fan_cluster = device_climate_fan.device.device.endpoints[1].fan state = hass.states.get(entity_id) assert state.attributes[ATTR_FAN_MODE] == FAN_AUTO @@ -1292,309 +944,3 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None: ) assert fan_cluster.write_attributes.await_count == 1 assert fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} - - -async def test_set_moes_preset(hass: HomeAssistant, device_climate_moes) -> None: - """Test setting preset for moes trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) - thrm_cluster = device_climate_moes.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_AWAY}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 0 - } - - 
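The hunks above repeatedly replace device_climate.device.endpoints[1].thermostat with device_climate.device.device.endpoints[1].thermostat: the climate fixtures now appear to hand the tests a wrapper whose .device attribute is the library-level device, which in turn owns the zigpy device and its ZCL endpoints. A minimal sketch of that object shape, using purely illustrative stand-in classes (these are assumptions for clarity, not the real ZHA types):

from unittest.mock import MagicMock


class FakeZigpyDevice:
    """Stand-in for the zigpy device that owns the ZCL endpoints."""

    def __init__(self) -> None:
        self.endpoints = {1: MagicMock(name="endpoint_1")}


class FakeLibDevice:
    """Stand-in for the library-level device wrapping the zigpy device."""

    def __init__(self) -> None:
        self.device = FakeZigpyDevice()


class FakeFixtureResult:
    """Stand-in for what the climate fixture yields to the tests."""

    def __init__(self) -> None:
        self.device = FakeLibDevice()


fixture_result = FakeFixtureResult()
# old access path: fixture_result.device.endpoints[1].thermostat
# new access path: fixture_result.device.device.endpoints[1].thermostat
thrm_cluster = fixture_result.device.device.endpoints[1].thermostat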
thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 1 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMFORT}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 3 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_ECO}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 4 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_BOOST}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 5 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_COMPLEX}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 6 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 2 - } - - -async def test_set_moes_operation_mode( - hass: HomeAssistant, device_climate_moes -) -> None: - """Test setting preset for moes trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass) - thrm_cluster = device_climate_moes.device.endpoints[1].thermostat - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_AWAY - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await send_attributes_report(hass, thrm_cluster, 
{"operation_preset": 3}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMFORT - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_ECO - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 5}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_BOOST - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 6}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_COMPLEX - - -@pytest.mark.parametrize( - ("preset_attr", "preset_mode"), - [ - (0, PRESET_AWAY), - (1, PRESET_SCHEDULE), - # pylint: disable-next=fixme - # (2, PRESET_NONE), # TODO: why does this not work? - (4, PRESET_ECO), - (5, PRESET_BOOST), - (7, PRESET_TEMP_MANUAL), - ], -) -async def test_beca_operation_mode_update( - hass: HomeAssistant, - device_climate_beca: ZHADevice, - preset_attr: int, - preset_mode: str, -) -> None: - """Test beca trv operation mode attribute update.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_beca, hass) - thrm_cluster = device_climate_beca.device.endpoints[1].thermostat - - # Test sending an attribute report - await send_attributes_report(hass, thrm_cluster, {"operation_preset": preset_attr}) - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == preset_mode - - # Test setting the preset - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: preset_mode}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.mock_calls == [ - call( - {"operation_preset": preset_attr}, - manufacturer=device_climate_beca.manufacturer_code, - ) - ] - - -async def test_set_zonnsmart_preset( - hass: HomeAssistant, device_climate_zonnsmart -) -> None: - """Test setting preset from homeassistant for zonnsmart trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) - thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_SCHEDULE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 0 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "holiday"}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 1 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 3 - } - - thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: "frost protect"}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 2 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 1 - } - assert thrm_cluster.write_attributes.call_args_list[1][0][0] == { - "operation_preset": 4 - } - - 
thrm_cluster.write_attributes.reset_mock() - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: entity_id, ATTR_PRESET_MODE: PRESET_NONE}, - blocking=True, - ) - - assert thrm_cluster.write_attributes.await_count == 1 - assert thrm_cluster.write_attributes.call_args_list[0][0][0] == { - "operation_preset": 1 - } - - -async def test_set_zonnsmart_operation_mode( - hass: HomeAssistant, device_climate_zonnsmart -) -> None: - """Test setting preset from trv for zonnsmart trv.""" - - entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass) - thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_SCHEDULE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 1}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == PRESET_NONE - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 2}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "holiday" - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 3}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "holiday" - - await send_attributes_report(hass, thrm_cluster, {"operation_preset": 4}) - - state = hass.states.get(entity_id) - assert state.attributes[ATTR_PRESET_MODE] == "frost protect" diff --git a/tests/components/zha/test_cluster_handlers.py b/tests/components/zha/test_cluster_handlers.py deleted file mode 100644 index 655a36a2492..00000000000 --- a/tests/components/zha/test_cluster_handlers.py +++ /dev/null @@ -1,1009 +0,0 @@ -"""Test ZHA Core cluster handlers.""" - -from collections.abc import Callable -import logging -import math -import threading -from types import NoneType -from unittest import mock -from unittest.mock import AsyncMock, patch - -import pytest -import zigpy.device -import zigpy.endpoint -from zigpy.endpoint import Endpoint as ZigpyEndpoint -import zigpy.profiles.zha -import zigpy.quirks as zigpy_quirks -import zigpy.types as t -from zigpy.zcl import foundation -import zigpy.zcl.clusters -from zigpy.zcl.clusters import CLUSTERS_BY_ID -import zigpy.zdo.types as zdo_t - -from homeassistant.components.zha.core import cluster_handlers, registries -from homeassistant.components.zha.core.cluster_handlers.lighting import ( - ColorClusterHandler, -) -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.components.zha.core.endpoint import Endpoint -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError - -from .common import make_zcl_header -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -from tests.common import async_capture_events - - -@pytest.fixture(autouse=True) -def disable_platform_only(): - """Disable platforms to speed up tests.""" - with patch("homeassistant.components.zha.PLATFORMS", []): - yield - - -@pytest.fixture -def ieee(): - """IEEE fixture.""" - return t.EUI64.deserialize(b"ieeeaddr")[0] - - -@pytest.fixture -def nwk(): - """NWK fixture.""" - return t.NWK(0xBEEF) - - -@pytest.fixture -async def zha_gateway(hass, setup_zha): - """Return ZhaGateway fixture.""" - await setup_zha() - 
return get_zha_gateway(hass) - - -@pytest.fixture -def zigpy_coordinator_device(zigpy_device_mock): - """Coordinator device fixture.""" - - coordinator = zigpy_device_mock( - {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - with patch.object(coordinator, "add_to_group", AsyncMock(return_value=[0])): - yield coordinator - - -@pytest.fixture -def endpoint(zigpy_coordinator_device): - """Endpoint fixture.""" - endpoint_mock = mock.MagicMock(spec_set=Endpoint) - endpoint_mock.zigpy_endpoint.device.application.get_device.return_value = ( - zigpy_coordinator_device - ) - type(endpoint_mock.device).skip_configuration = mock.PropertyMock( - return_value=False - ) - endpoint_mock.device.hass.loop_thread_id = threading.get_ident() - endpoint_mock.id = 1 - return endpoint_mock - - -@pytest.fixture -def poll_control_ch(endpoint, zigpy_device_mock): - """Poll control cluster handler fixture.""" - cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id - ).get(None) - return cluster_handler_class(cluster, endpoint) - - -@pytest.fixture -async def poll_control_device(zha_device_restored, zigpy_device_mock): - """Poll control device fixture.""" - cluster_id = zigpy.zcl.clusters.general.PollControl.cluster_id - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - return await zha_device_restored(zigpy_dev) - - -@pytest.mark.parametrize( - ("cluster_id", "bind_count", "attrs"), - [ - (zigpy.zcl.clusters.general.Basic.cluster_id, 0, {}), - ( - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - 1, - {"battery_voltage", "battery_percentage_remaining"}, - ), - ( - zigpy.zcl.clusters.general.DeviceTemperature.cluster_id, - 1, - {"current_temperature"}, - ), - (zigpy.zcl.clusters.general.Identify.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.Groups.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.Scenes.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.OnOff.cluster_id, 1, {"on_off"}), - (zigpy.zcl.clusters.general.OnOffConfiguration.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.LevelControl.cluster_id, 1, {"current_level"}), - (zigpy.zcl.clusters.general.Alarms.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.AnalogInput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.AnalogOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.AnalogValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.BinaryOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.BinaryValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateInput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateOutput.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.MultistateValue.cluster_id, 1, {"present_value"}), - (zigpy.zcl.clusters.general.Commissioning.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.Partition.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.Ota.cluster_id, 0, {}), - (zigpy.zcl.clusters.general.PowerProfile.cluster_id, 1, {}), - 
(zigpy.zcl.clusters.general.ApplianceControl.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.PollControl.cluster_id, 1, {}), - (zigpy.zcl.clusters.general.GreenPowerProxy.cluster_id, 0, {}), - (zigpy.zcl.clusters.closures.DoorLock.cluster_id, 1, {"lock_state"}), - ( - zigpy.zcl.clusters.hvac.Thermostat.cluster_id, - 1, - { - "local_temperature", - "occupied_cooling_setpoint", - "occupied_heating_setpoint", - "unoccupied_cooling_setpoint", - "unoccupied_heating_setpoint", - "running_mode", - "running_state", - "system_mode", - "occupancy", - "pi_cooling_demand", - "pi_heating_demand", - }, - ), - (zigpy.zcl.clusters.hvac.Fan.cluster_id, 1, {"fan_mode"}), - ( - zigpy.zcl.clusters.lighting.Color.cluster_id, - 1, - { - "current_x", - "current_y", - "color_temperature", - "current_hue", - "enhanced_current_hue", - "current_saturation", - }, - ), - ( - zigpy.zcl.clusters.measurement.IlluminanceMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.IlluminanceLevelSensing.cluster_id, - 1, - {"level_status"}, - ), - ( - zigpy.zcl.clusters.measurement.TemperatureMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.PressureMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.FlowMeasurement.cluster_id, - 1, - {"measured_value"}, - ), - ( - zigpy.zcl.clusters.measurement.RelativeHumidity.cluster_id, - 1, - {"measured_value"}, - ), - (zigpy.zcl.clusters.measurement.OccupancySensing.cluster_id, 1, {"occupancy"}), - ( - zigpy.zcl.clusters.smartenergy.Metering.cluster_id, - 1, - { - "instantaneous_demand", - "current_summ_delivered", - "current_tier1_summ_delivered", - "current_tier2_summ_delivered", - "current_tier3_summ_delivered", - "current_tier4_summ_delivered", - "current_tier5_summ_delivered", - "current_tier6_summ_delivered", - "current_summ_received", - "status", - }, - ), - ( - zigpy.zcl.clusters.homeautomation.ElectricalMeasurement.cluster_id, - 1, - { - "active_power", - "active_power_max", - "apparent_power", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - }, - ), - ], -) -async def test_in_cluster_handler_config( - cluster_id, bind_count, attrs, endpoint, zigpy_device_mock, zha_gateway -) -> None: - """Test ZHA core cluster handler configuration for input clusters.""" - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_INPUT: [cluster_id], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].in_clusters[cluster_id] - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None, cluster_handlers.ClusterHandler} - ).get(None) - cluster_handler = cluster_handler_class(cluster, endpoint) - - await cluster_handler.async_configure() - - assert cluster.bind.call_count == bind_count - assert cluster.configure_reporting.call_count == 0 - assert cluster.configure_reporting_multiple.call_count == math.ceil(len(attrs) / 3) - reported_attrs = { - a - for a in attrs - for attr in cluster.configure_reporting_multiple.call_args_list - for attrs in attr[0][0] - } - assert set(attrs) == reported_attrs - - -@pytest.mark.parametrize( - ("cluster_id", "bind_count"), - [ - (0x0000, 0), - (0x0001, 1), - (0x0002, 1), - (0x0003, 0), - (0x0004, 0), - (0x0005, 1), - (0x0006, 1), - (0x0007, 1), - (0x0008, 1), - (0x0009, 1), - (0x0015, 1), - (0x0016, 1), - (0x0019, 0), - (0x001A, 1), - (0x001B, 1), - (0x0020, 1), - (0x0021, 0), - (0x0101, 1), - 
(0x0202, 1), - (0x0300, 1), - (0x0400, 1), - (0x0402, 1), - (0x0403, 1), - (0x0405, 1), - (0x0406, 1), - (0x0702, 1), - (0x0B04, 1), - ], -) -async def test_out_cluster_handler_config( - cluster_id, bind_count, endpoint, zigpy_device_mock, zha_gateway -) -> None: - """Test ZHA core cluster handler configuration for output clusters.""" - zigpy_dev = zigpy_device_mock( - {1: {SIG_EP_OUTPUT: [cluster_id], SIG_EP_INPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - cluster = zigpy_dev.endpoints[1].out_clusters[cluster_id] - cluster.bind_only = True - cluster_handler_class = registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.get( - cluster_id, {None: cluster_handlers.ClusterHandler} - ).get(None) - cluster_handler = cluster_handler_class(cluster, endpoint) - - await cluster_handler.async_configure() - - assert cluster.bind.call_count == bind_count - assert cluster.configure_reporting.call_count == 0 - - -def test_cluster_handler_registry() -> None: - """Test ZIGBEE cluster handler Registry.""" - - # get all quirk ID from zigpy quirks registry - all_quirk_ids = {} - for cluster_id in CLUSTERS_BY_ID: - all_quirk_ids[cluster_id] = {None} - # pylint: disable-next=too-many-nested-blocks - for manufacturer in zigpy_quirks._DEVICE_REGISTRY.registry.values(): - for model_quirk_list in manufacturer.values(): - for quirk in model_quirk_list: - quirk_id = getattr(quirk, zha_const.ATTR_QUIRK_ID, None) - device_description = getattr(quirk, "replacement", None) or getattr( - quirk, "signature", None - ) - - for endpoint in device_description["endpoints"].values(): - cluster_ids = set() - if "input_clusters" in endpoint: - cluster_ids.update(endpoint["input_clusters"]) - if "output_clusters" in endpoint: - cluster_ids.update(endpoint["output_clusters"]) - for cluster_id in cluster_ids: - if not isinstance(cluster_id, int): - cluster_id = cluster_id.cluster_id - if cluster_id not in all_quirk_ids: - all_quirk_ids[cluster_id] = {None} - all_quirk_ids[cluster_id].add(quirk_id) - - # pylint: disable-next=undefined-loop-variable - del quirk, model_quirk_list, manufacturer - - for ( - cluster_id, - cluster_handler_classes, - ) in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.items(): - assert isinstance(cluster_id, int) - assert 0 <= cluster_id <= 0xFFFF - assert cluster_id in all_quirk_ids - assert isinstance(cluster_handler_classes, dict) - for quirk_id, cluster_handler in cluster_handler_classes.items(): - assert isinstance(quirk_id, (NoneType, str)) - assert issubclass(cluster_handler, cluster_handlers.ClusterHandler) - assert quirk_id in all_quirk_ids[cluster_id] - - -def test_epch_unclaimed_cluster_handlers(cluster_handler) -> None: - """Test unclaimed cluster handlers.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - - ep_cluster_handlers = Endpoint( - mock.MagicMock(spec_set=ZigpyEndpoint), mock.MagicMock(spec_set=ZHADevice) - ) - all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - with mock.patch.dict( - ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True - ): - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 in available - assert ch_2 in available - assert ch_3 in available - - ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] = ch_2 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 in available - assert ch_2 not in 
available - assert ch_3 in available - - ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] = ch_1 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 not in available - assert ch_2 not in available - assert ch_3 in available - - ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] = ch_3 - available = ep_cluster_handlers.unclaimed_cluster_handlers() - assert ch_1 not in available - assert ch_2 not in available - assert ch_3 not in available - - -def test_epch_claim_cluster_handlers(cluster_handler) -> None: - """Test cluster handler claiming.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - - ep_cluster_handlers = Endpoint( - mock.MagicMock(spec_set=ZigpyEndpoint), mock.MagicMock(spec_set=ZHADevice) - ) - all_cluster_handlers = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - with mock.patch.dict( - ep_cluster_handlers.all_cluster_handlers, all_cluster_handlers, clear=True - ): - assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_2.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers - - ep_cluster_handlers.claim_cluster_handlers([ch_2]) - assert ch_1.id not in ep_cluster_handlers.claimed_cluster_handlers - assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 - assert ch_3.id not in ep_cluster_handlers.claimed_cluster_handlers - - ep_cluster_handlers.claim_cluster_handlers([ch_3, ch_1]) - assert ch_1.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_1.id] is ch_1 - assert ch_2.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_2.id] is ch_2 - assert ch_3.id in ep_cluster_handlers.claimed_cluster_handlers - assert ep_cluster_handlers.claimed_cluster_handlers[ch_3.id] is ch_3 - assert "1:0x0300" in ep_cluster_handlers.claimed_cluster_handlers - - -@mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - mock.MagicMock(), -) -def test_ep_all_cluster_handlers(m1, zha_device_mock: Callable[..., ZHADevice]) -> None: - """Test Endpoint adding all cluster handlers.""" - zha_device = zha_device_mock( - { - 1: { - SIG_EP_INPUT: [0, 1, 6, 8], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - }, - 2: { - SIG_EP_INPUT: [0, 1, 6, 8, 768], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: 0x0000, - }, - } - ) - assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0000" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0001" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0006" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0008" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0000" not in zha_device._endpoints[2].all_cluster_handlers - assert 
"1:0x0001" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0006" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0008" not in zha_device._endpoints[2].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0006" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - -@mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.add_client_cluster_handlers" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - mock.MagicMock(), -) -def test_cluster_handler_power_config( - m1, zha_device_mock: Callable[..., ZHADevice] -) -> None: - """Test that cluster handlers only get a single power cluster handler.""" - in_clusters = [0, 1, 6, 8] - zha_device = zha_device_mock( - { - 1: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - 2: { - SIG_EP_INPUT: [*in_clusters, 768], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: 0x0000, - }, - } - ) - assert "1:0x0000" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0001" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0006" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0008" in zha_device._endpoints[1].all_cluster_handlers - assert "1:0x0300" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0000" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0006" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0008" in zha_device._endpoints[2].all_cluster_handlers - assert "2:0x0300" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - zha_device = zha_device_mock( - { - 1: {SIG_EP_INPUT: [], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - 2: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}, - } - ) - assert "1:0x0001" not in zha_device._endpoints[1].all_cluster_handlers - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - zha_device = zha_device_mock( - {2: {SIG_EP_INPUT: in_clusters, SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x0000}} - ) - assert "2:0x0001" in zha_device._endpoints[2].all_cluster_handlers - - zha_device.async_cleanup_handles() - - -async def test_ep_cluster_handlers_configure(cluster_handler) -> None: - """Test unclaimed cluster handlers.""" - - ch_1 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_2 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_3 = cluster_handler(zha_const.CLUSTER_HANDLER_COLOR, 768) - ch_3.async_configure = AsyncMock(side_effect=TimeoutError) - ch_3.async_initialize = AsyncMock(side_effect=TimeoutError) - ch_4 = cluster_handler(zha_const.CLUSTER_HANDLER_ON_OFF, 6) - ch_5 = cluster_handler(zha_const.CLUSTER_HANDLER_LEVEL, 8) - ch_5.async_configure = AsyncMock(side_effect=TimeoutError) - ch_5.async_initialize = AsyncMock(side_effect=TimeoutError) - - endpoint_mock = mock.MagicMock(spec_set=ZigpyEndpoint) - type(endpoint_mock).in_clusters = mock.PropertyMock(return_value={}) - type(endpoint_mock).out_clusters = mock.PropertyMock(return_value={}) - endpoint = Endpoint.new(endpoint_mock, 
mock.MagicMock(spec_set=ZHADevice)) - - claimed = {ch_1.id: ch_1, ch_2.id: ch_2, ch_3.id: ch_3} - client_handlers = {ch_4.id: ch_4, ch_5.id: ch_5} - - with ( - mock.patch.dict(endpoint.claimed_cluster_handlers, claimed, clear=True), - mock.patch.dict(endpoint.client_cluster_handlers, client_handlers, clear=True), - ): - await endpoint.async_configure() - await endpoint.async_initialize(mock.sentinel.from_cache) - - for ch in (*claimed.values(), *client_handlers.values()): - assert ch.async_initialize.call_count == 1 - assert ch.async_initialize.await_count == 1 - assert ch.async_initialize.call_args[0][0] is mock.sentinel.from_cache - assert ch.async_configure.call_count == 1 - assert ch.async_configure.await_count == 1 - - assert ch_3.debug.call_count == 2 - assert ch_5.debug.call_count == 2 - - -async def test_poll_control_configure(poll_control_ch) -> None: - """Test poll control cluster handler configuration.""" - await poll_control_ch.async_configure() - assert poll_control_ch.cluster.write_attributes.call_count == 1 - assert poll_control_ch.cluster.write_attributes.call_args[0][0] == { - "checkin_interval": poll_control_ch.CHECKIN_INTERVAL - } - - -async def test_poll_control_checkin_response(poll_control_ch) -> None: - """Test poll control cluster handler checkin response.""" - rsp_mock = AsyncMock() - set_interval_mock = AsyncMock() - fast_poll_mock = AsyncMock() - cluster = poll_control_ch.cluster - patch_1 = mock.patch.object(cluster, "checkin_response", rsp_mock) - patch_2 = mock.patch.object(cluster, "set_long_poll_interval", set_interval_mock) - patch_3 = mock.patch.object(cluster, "fast_poll_stop", fast_poll_mock) - - with patch_1, patch_2, patch_3: - await poll_control_ch.check_in_response(33) - - assert rsp_mock.call_count == 1 - assert set_interval_mock.call_count == 1 - assert fast_poll_mock.call_count == 1 - - await poll_control_ch.check_in_response(33) - assert cluster.endpoint.request.call_count == 3 - assert cluster.endpoint.request.await_count == 3 - assert cluster.endpoint.request.call_args_list[0][0][1] == 33 - assert cluster.endpoint.request.call_args_list[0][0][0] == 0x0020 - assert cluster.endpoint.request.call_args_list[1][0][0] == 0x0020 - - -async def test_poll_control_cluster_command( - hass: HomeAssistant, poll_control_device -) -> None: - """Test poll control cluster handler response to cluster command.""" - checkin_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - events = async_capture_events(hass, zha_const.ZHA_EVENT) - - with mock.patch.object(poll_control_ch, "check_in_response", checkin_mock): - tsn = 22 - hdr = make_zcl_header(0, global_command=False, tsn=tsn) - assert not events - cluster.handle_message( - hdr, [mock.sentinel.args, mock.sentinel.args2, mock.sentinel.args3] - ) - await hass.async_block_till_done() - - assert checkin_mock.call_count == 1 - assert checkin_mock.await_count == 1 - assert checkin_mock.await_args[0][0] == tsn - assert len(events) == 1 - data = events[0].data - assert data["command"] == "checkin" - assert data["args"][0] is mock.sentinel.args - assert data["args"][1] is mock.sentinel.args2 - assert data["args"][2] is mock.sentinel.args3 - assert data["unique_id"] == "00:11:22:33:44:55:66:77:1:0x0020" - assert data["device_id"] == poll_control_device.device_id - - -async def test_poll_control_ignore_list( - hass: HomeAssistant, poll_control_device -) -> None: - """Test poll control cluster handler ignore list.""" - 
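The removed poll-control tests above pin down the check-in handshake: on check_in_response the handler acknowledges the check-in, pushes a long-poll interval, and stops fast polling, each exactly once, and skip_manufacturer_id suppresses the long-poll write for ignored vendors. A condensed, self-contained sketch of that contract built only on mocks (the handler class here is an illustrative stand-in under those assumptions, not the real ZHA implementation):

import asyncio
from unittest.mock import AsyncMock


class FakePollControlHandler:
    """Illustrative stand-in for a poll-control cluster handler."""

    def __init__(self, cluster) -> None:
        self.cluster = cluster
        self._skip_manufacturers: set[int] = set()

    def skip_manufacturer_id(self, manufacturer_code: int) -> None:
        self._skip_manufacturers.add(manufacturer_code)

    async def check_in_response(self, tsn: int, manufacturer_code: int = 0) -> None:
        # Acknowledge the check-in; the interval value here is arbitrary.
        await self.cluster.checkin_response(tsn)
        if manufacturer_code not in self._skip_manufacturers:
            await self.cluster.set_long_poll_interval(300)
        await self.cluster.fast_poll_stop()


async def main() -> None:
    cluster = AsyncMock()
    handler = FakePollControlHandler(cluster)

    await handler.check_in_response(33)
    assert cluster.checkin_response.await_count == 1
    assert cluster.set_long_poll_interval.await_count == 1
    assert cluster.fast_poll_stop.await_count == 1

    # Ignored manufacturer: the long-poll interval is not written again.
    handler.skip_manufacturer_id(4151)
    await handler.check_in_response(33, manufacturer_code=4151)
    assert cluster.set_long_poll_interval.await_count == 1


asyncio.run(main())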
set_long_poll_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 1 - - set_long_poll_mock.reset_mock() - poll_control_ch.skip_manufacturer_id(4151) - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 0 - - -async def test_poll_control_ikea(hass: HomeAssistant, poll_control_device) -> None: - """Test poll control cluster handler ignore list for ikea.""" - set_long_poll_mock = AsyncMock() - poll_control_ch = poll_control_device._endpoints[1].all_cluster_handlers["1:0x0020"] - cluster = poll_control_ch.cluster - - poll_control_device.device.node_desc.manufacturer_code = 4476 - with mock.patch.object(cluster, "set_long_poll_interval", set_long_poll_mock): - await poll_control_ch.check_in_response(33) - - assert set_long_poll_mock.call_count == 0 - - -@pytest.fixture -def zigpy_zll_device(zigpy_device_mock): - """ZLL device fixture.""" - - return zigpy_device_mock( - {1: {SIG_EP_INPUT: [0x1000], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - -async def test_zll_device_groups( - zigpy_zll_device, endpoint, zigpy_coordinator_device -) -> None: - """Test adding coordinator to ZLL groups.""" - - cluster = zigpy_zll_device.endpoints[1].lightlink - cluster_handler = cluster_handlers.lightlink.LightLinkClusterHandler( - cluster, endpoint - ) - - get_group_identifiers_rsp = zigpy.zcl.clusters.lightlink.LightLink.commands_by_name[ - "get_group_identifiers_rsp" - ].schema - - with patch.object( - cluster, - "command", - AsyncMock( - return_value=get_group_identifiers_rsp( - total=0, start_index=0, group_info_records=[] - ) - ), - ) as cmd_mock: - await cluster_handler.async_configure() - assert cmd_mock.await_count == 1 - assert ( - cluster.server_commands[cmd_mock.await_args[0][0]].name - == "get_group_identifiers" - ) - assert cluster.bind.call_count == 0 - assert zigpy_coordinator_device.add_to_group.await_count == 1 - assert zigpy_coordinator_device.add_to_group.await_args[0][0] == 0x0000 - - zigpy_coordinator_device.add_to_group.reset_mock() - group_1 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xABCD, 0x00) - group_2 = zigpy.zcl.clusters.lightlink.GroupInfoRecord(0xAABB, 0x00) - with patch.object( - cluster, - "command", - AsyncMock( - return_value=get_group_identifiers_rsp( - total=2, start_index=0, group_info_records=[group_1, group_2] - ) - ), - ) as cmd_mock: - await cluster_handler.async_configure() - assert cmd_mock.await_count == 1 - assert ( - cluster.server_commands[cmd_mock.await_args[0][0]].name - == "get_group_identifiers" - ) - assert cluster.bind.call_count == 0 - assert zigpy_coordinator_device.add_to_group.await_count == 2 - assert ( - zigpy_coordinator_device.add_to_group.await_args_list[0][0][0] - == group_1.group_id - ) - assert ( - zigpy_coordinator_device.add_to_group.await_args_list[1][0][0] - == group_2.group_id - ) - - -@mock.patch( - "homeassistant.components.zha.core.discovery.PROBE.discover_entities", - mock.MagicMock(), -) -async def test_cluster_no_ep_attribute( - zha_device_mock: Callable[..., ZHADevice], -) -> None: - """Test cluster handlers for clusters without ep_attribute.""" - - zha_device = zha_device_mock( - {1: 
{SIG_EP_INPUT: [0x042E], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}}, - ) - - assert "1:0x042e" in zha_device._endpoints[1].all_cluster_handlers - assert zha_device._endpoints[1].all_cluster_handlers["1:0x042e"].name - - zha_device.async_cleanup_handles() - - -async def test_configure_reporting(hass: HomeAssistant, endpoint) -> None: - """Test setting up a cluster handler and configuring attribute reporting in two batches.""" - - class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): - BIND = True - REPORT_CONFIG = ( - # By name - cluster_handlers.AttrReportConfig(attr="current_x", config=(1, 60, 1)), - cluster_handlers.AttrReportConfig(attr="current_hue", config=(1, 60, 2)), - cluster_handlers.AttrReportConfig( - attr="color_temperature", config=(1, 60, 3) - ), - cluster_handlers.AttrReportConfig(attr="current_y", config=(1, 60, 4)), - ) - - mock_ep = mock.AsyncMock(spec_set=zigpy.endpoint.Endpoint) - mock_ep.device.zdo = AsyncMock() - - cluster = zigpy.zcl.clusters.lighting.Color(mock_ep) - cluster.bind = AsyncMock( - spec_set=cluster.bind, - return_value=[zdo_t.Status.SUCCESS], # ZDOCmd.Bind_rsp - ) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - cluster_handler = TestZigbeeClusterHandler(cluster, endpoint) - await cluster_handler.async_configure() - - # Since we request reporting for five attributes, we need to make two calls (3 + 1) - assert cluster.configure_reporting_multiple.mock_calls == [ - mock.call( - { - "current_x": (1, 60, 1), - "current_hue": (1, 60, 2), - "color_temperature": (1, 60, 3), - } - ), - mock.call( - { - "current_y": (1, 60, 4), - } - ), - ] - - -async def test_invalid_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that fails to match properly.""" - - class TestZigbeeClusterHandler(cluster_handlers.ClusterHandler): - REPORT_CONFIG = ( - cluster_handlers.AttrReportConfig(attr="missing_attr", config=(1, 60, 1)), - ) - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = None - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - # The cluster handler throws an error when matching this cluster - with pytest.raises(KeyError): - TestZigbeeClusterHandler(cluster, zha_endpoint) - - # And one is also logged at runtime - with ( - patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {None: TestZigbeeClusterHandler}, - ), - caplog.at_level(logging.WARNING), - ): - zha_endpoint.add_all_cluster_handlers() - - assert "missing_attr" in caplog.text - - -async def test_standard_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that matches a standard cluster.""" - - class TestZigbeeClusterHandler(ColorClusterHandler): - pass - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = 
zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = None - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - with patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {"__test_quirk_id": TestZigbeeClusterHandler}, - ): - zha_endpoint.add_all_cluster_handlers() - - assert len(zha_endpoint.all_cluster_handlers) == 1 - assert isinstance( - list(zha_endpoint.all_cluster_handlers.values())[0], ColorClusterHandler - ) - - -async def test_quirk_id_cluster_handler( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test setting up a cluster handler that matches a standard cluster.""" - - class TestZigbeeClusterHandler(ColorClusterHandler): - pass - - mock_device = mock.AsyncMock(spec_set=zigpy.device.Device) - zigpy_ep = zigpy.endpoint.Endpoint(mock_device, endpoint_id=1) - - cluster = zigpy_ep.add_input_cluster(zigpy.zcl.clusters.lighting.Color.cluster_id) - cluster.configure_reporting_multiple = AsyncMock( - spec_set=cluster.configure_reporting_multiple, - return_value=[ - foundation.ConfigureReportingResponseRecord( - status=foundation.Status.SUCCESS - ) - ], - ) - - mock_zha_device = mock.AsyncMock(spec=ZHADevice) - mock_zha_device.quirk_id = "__test_quirk_id" - zha_endpoint = Endpoint(zigpy_ep, mock_zha_device) - - with patch.dict( - registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY[cluster.cluster_id], - {"__test_quirk_id": TestZigbeeClusterHandler}, - ): - zha_endpoint.add_all_cluster_handlers() - - assert len(zha_endpoint.all_cluster_handlers) == 1 - assert isinstance( - list(zha_endpoint.all_cluster_handlers.values())[0], TestZigbeeClusterHandler - ) - - -# parametrize side effects: -@pytest.mark.parametrize( - ("side_effect", "expected_error"), - [ - (zigpy.exceptions.ZigbeeException(), "Failed to send request"), - ( - zigpy.exceptions.ZigbeeException("Zigbee exception"), - "Failed to send request: Zigbee exception", - ), - (TimeoutError(), "Failed to send request: device did not respond"), - ], -) -async def test_retry_request( - side_effect: Exception | None, expected_error: str | None -) -> None: - """Test the `retry_request` decorator's handling of zigpy-internal exceptions.""" - - async def func(arg1: int, arg2: int) -> int: - assert arg1 == 1 - assert arg2 == 2 - - raise side_effect - - func = mock.AsyncMock(wraps=func) - decorated_func = cluster_handlers.retry_request(func) - - with pytest.raises(HomeAssistantError) as exc: - await decorated_func(1, arg2=2) - - assert func.await_count == 3 - assert isinstance(exc.value, HomeAssistantError) - assert str(exc.value) == expected_error - - -async def test_cluster_handler_naming() -> None: - """Test that all cluster handlers are named appropriately.""" - for client_cluster_handler in registries.CLIENT_CLUSTER_HANDLER_REGISTRY.values(): - assert issubclass(client_cluster_handler, cluster_handlers.ClientClusterHandler) - assert client_cluster_handler.__name__.endswith("ClientClusterHandler") - - for cluster_handler_dict in registries.ZIGBEE_CLUSTER_HANDLER_REGISTRY.values(): - for cluster_handler in cluster_handler_dict.values(): - assert not issubclass( - cluster_handler, cluster_handlers.ClientClusterHandler - ) - assert cluster_handler.__name__.endswith("ClusterHandler") diff 
--git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index 0c8414f458f..f3104141269 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -9,6 +9,7 @@ import uuid import pytest import serial.tools.list_ports +from zha.application.const import RadioType from zigpy.backups import BackupManager import zigpy.config from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH, SCHEMA_DEVICE @@ -21,13 +22,12 @@ from homeassistant.components import ssdp, usb, zeroconf from homeassistant.components.hassio import AddonState from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL from homeassistant.components.zha import config_flow, radio_manager -from homeassistant.components.zha.core.const import ( +from homeassistant.components.zha.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, DOMAIN, EZSP_OVERWRITE_EUI64, - RadioType, ) from homeassistant.components.zha.radio_manager import ProbeResult from homeassistant.config_entries import ( diff --git a/tests/components/zha/test_cover.py b/tests/components/zha/test_cover.py index 5f6dac885f2..afef2aab70f 100644 --- a/tests/components/zha/test_cover.py +++ b/tests/components/zha/test_cover.py @@ -1,12 +1,10 @@ """Test ZHA cover.""" -import asyncio from unittest.mock import patch import pytest -import zigpy.profiles.zha -import zigpy.types -from zigpy.zcl.clusters import closures, general +from zigpy.profiles import zha +from zigpy.zcl.clusters import closures import zigpy.zcl.foundation as zcl_f from homeassistant.components.cover import ( @@ -22,34 +20,27 @@ from homeassistant.components.cover import ( SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, SERVICE_STOP_COVER_TILT, - SERVICE_TOGGLE_COVER_TILT, ) -from homeassistant.components.zha.core.const import ZHA_EVENT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import ( - ATTR_COMMAND, STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, - STATE_UNAVAILABLE, Platform, ) -from homeassistant.core import CoreState, HomeAssistant, State +from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_component import async_update_entity -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - make_zcl_header, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import async_capture_events, mock_restore_cache - Default_Response = zcl_f.GENERAL_COMMANDS[zcl_f.GeneralCommand.Default_Response].schema @@ -68,135 +59,31 @@ def cover_platform_only(): yield -@pytest.fixture -def zigpy_cover_device(zigpy_device_mock): - """Zigpy cover device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [closures.WindowCovering.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_cover_remote(zigpy_device_mock): - """Zigpy cover remote device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_CONTROLLER, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: 
[closures.WindowCovering.cluster_id], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_shade_device(zigpy_device_mock): - """Zigpy shade device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, - SIG_EP_INPUT: [ - closures.Shade.cluster_id, - general.LevelControl.cluster_id, - general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_keen_vent(zigpy_device_mock): - """Zigpy Keen Vent device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.LEVEL_CONTROLLABLE_OUTPUT, - SIG_EP_INPUT: [general.LevelControl.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock( - endpoints, manufacturer="Keen Home Inc", model="SV02-612-MP-1.3" - ) - - WCAttrs = closures.WindowCovering.AttributeDefs WCCmds = closures.WindowCovering.ServerCommandDefs WCT = closures.WindowCovering.WindowCoveringType WCCS = closures.WindowCovering.ConfigStatus -async def test_cover_non_tilt_initial_state( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: +async def test_cover(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA cover platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + } + }, + ) # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.window_covering_type.name: WCT.Drapery, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - assert ( - not zha_device.endpoints[1] - .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] - .inverted - ) - assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OPEN - assert state.attributes[ATTR_CURRENT_POSITION] == 100 - - -async def test_cover( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering + cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = 
{ WCAttrs.current_position_lift_percentage.name: 0, WCAttrs.current_position_tilt_percentage.name: 42, @@ -204,9 +91,17 @@ async def test_cover( WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), } update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) + assert entity_id is not None + assert ( - not zha_device.endpoints[1] + not zha_device_proxy.device.endpoints[1] .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] .inverted ) @@ -220,21 +115,7 @@ async def test_cover( in cluster.read_attributes.call_args[0][0] ) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 state = hass.states.get(entity_id) assert state assert state.state == STATE_OPEN @@ -440,61 +321,41 @@ async def test_cover( assert cluster.request.call_args[0][2].command.name == WCCmds.stop.name assert cluster.request.call_args[1]["expect_reply"] is True - # test rejoin - cluster.PLUGGED_ATTR_READS = {WCAttrs.current_position_lift_percentage.name: 0} - await async_test_rejoin(hass, zigpy_cover_device, [cluster], (1,)) - assert hass.states.get(entity_id).state == STATE_OPEN - - # test toggle - with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_TOGGLE_COVER_TILT, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster.request.call_count == 1 - assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == 0x08 - assert ( - cluster.request.call_args[0][2].command.name - == WCCmds.go_to_tilt_percentage.name - ) - assert cluster.request.call_args[0][3] == 100 - assert cluster.request.call_args[1]["expect_reply"] is True - async def test_cover_failures( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA cover platform failure cases.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_PROFILE: zha.PROFILE_ID, + SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + } + }, + ) # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering + cluster = zigpy_device.endpoints[1].window_covering cluster.PLUGGED_ATTR_READS = { WCAttrs.current_position_tilt_percentage.name: 42, WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, } update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - entity_id = 
find_entity_id(Platform.COVER, zha_device, hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.COVER, zha_device_proxy, hass) assert entity_id is not None - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # test update returned None - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - # test that the state has changed from unavailable to closed await send_attributes_report(hass, cluster, {0: 0, 8: 100, 1: 1}) assert hass.states.get(entity_id).state == STATE_CLOSED @@ -670,319 +531,3 @@ async def test_cover_failures( cluster.request.call_args[0][1] == closures.WindowCovering.ServerCommandDefs.stop.id ) - - -async def test_shade( - hass: HomeAssistant, zha_device_joined_restored, zigpy_shade_device -) -> None: - """Test ZHA cover platform for shade device type.""" - - # load up cover domain - zha_device = await zha_device_joined_restored(zigpy_shade_device) - - cluster_on_off = zigpy_shade_device.endpoints[1].on_off - cluster_level = zigpy_shade_device.endpoints[1].level - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test that the state has changed from unavailable to off - await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # test to see if it opens - await send_attributes_report(hass, cluster_on_off, {8: 0, 0: True, 1: 1}) - assert hass.states.get(entity_id).state == STATE_OPEN - - # close from UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.down_close.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_CLOSE_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0000 - assert hass.states.get(entity_id).state == STATE_OPEN - - with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0000 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI 
command fails - assert ATTR_CURRENT_POSITION not in hass.states.get(entity_id).attributes - await send_attributes_report(hass, cluster_level, {0: 0}) - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.up_open.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # stop from UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=general.LevelControl.ServerCommandDefs.stop.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - - assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert ( - cluster_level.request.call_args[0][1] - == general.LevelControl.ServerCommandDefs.stop.id - ) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI succeeds - with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True - ) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert hass.states.get(entity_id).state == STATE_OPEN - - # set position UI command fails - with patch( - "zigpy.zcl.Cluster.request", - return_value=Default_Response( - command_id=closures.WindowCovering.ServerCommandDefs.go_to_lift_percentage.id, - status=zcl_f.Status.UNSUP_CLUSTER_COMMAND, - ), - ): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {"entity_id": entity_id, "position": 47}, - blocking=True, - ) - - assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] == 0x0004 - assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 0 - - # set position UI success - with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_SET_COVER_POSITION, - {"entity_id": entity_id, "position": 47}, - blocking=True, - ) - assert cluster_level.request.call_count == 1 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] == 0x0004 - assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 47 - - # report position change - await send_attributes_report(hass, cluster_level, {8: 0, 0: 100, 1: 1}) - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == int( - 100 * 100 / 255 - ) - - # test rejoin - await async_test_rejoin( - hass, zigpy_shade_device, [cluster_level, cluster_on_off], (1,) - ) - assert hass.states.get(entity_id).state == STATE_OPEN - - # test cover stop - with 
patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_STOP_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_level.request.call_count == 3 - assert cluster_level.request.call_args[0][0] is False - assert cluster_level.request.call_args[0][1] in (0x0003, 0x0007) - - -async def test_shade_restore_state( - hass: HomeAssistant, zha_device_restored, zigpy_shade_device -) -> None: - """Ensure states are restored on startup.""" - mock_restore_cache( - hass, - ( - State( - "cover.fakemanufacturer_fakemodel_shade", - STATE_OPEN, - {ATTR_CURRENT_POSITION: 50}, - ), - ), - ) - - hass.set_state(CoreState.starting) - - zha_device = await zha_device_restored(zigpy_shade_device) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - # test that the cover was created and that it is available - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 50 - - -async def test_cover_restore_state( - hass: HomeAssistant, zha_device_restored, zigpy_cover_device -) -> None: - """Ensure states are restored on startup.""" - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 50, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, - } - update_attribute_cache(cluster) - - hass.set_state(CoreState.starting) - - zha_device = await zha_device_restored(zigpy_cover_device) - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - # test that the cover was created and that it is available - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 - 50 - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_TILT_POSITION] == 100 - 42 - - -async def test_keen_vent( - hass: HomeAssistant, zha_device_joined_restored, zigpy_keen_vent -) -> None: - """Test keen vent.""" - - # load up cover domain - zha_device = await zha_device_joined_restored(zigpy_keen_vent) - - cluster_on_off = zigpy_keen_vent.endpoints[1].on_off - cluster_level = zigpy_keen_vent.endpoints[1].level - entity_id = find_entity_id(Platform.COVER, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test that the state has changed from unavailable to off - await send_attributes_report(hass, cluster_on_off, {8: 0, 0: False, 1: 1}) - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI command fails - p1 = patch.object(cluster_on_off, "request", side_effect=TimeoutError) - p2 = patch.object(cluster_level, "request", return_value=[4, 0]) - - with p1, p2: - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - COVER_DOMAIN, - SERVICE_OPEN_COVER, - {"entity_id": entity_id}, - blocking=True, - ) - assert cluster_on_off.request.call_count == 3 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert 
cluster_level.request.call_count == 1 - assert hass.states.get(entity_id).state == STATE_CLOSED - - # open from UI command success - p1 = patch.object(cluster_on_off, "request", return_value=[1, 0]) - p2 = patch.object(cluster_level, "request", return_value=[4, 0]) - - with p1, p2: - await hass.services.async_call( - COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True - ) - await asyncio.sleep(0) - assert cluster_on_off.request.call_count == 1 - assert cluster_on_off.request.call_args[0][0] is False - assert cluster_on_off.request.call_args[0][1] == 0x0001 - assert cluster_level.request.call_count == 1 - assert hass.states.get(entity_id).state == STATE_OPEN - assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 100 - - -async def test_cover_remote( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_remote -) -> None: - """Test ZHA cover remote.""" - - # load up cover domain - await zha_device_joined_restored(zigpy_cover_remote) - - cluster = zigpy_cover_remote.endpoints[1].out_clusters[ - closures.WindowCovering.cluster_id - ] - zha_events = async_capture_events(hass, ZHA_EVENT) - - # up command - hdr = make_zcl_header(0, global_command=False) - cluster.handle_message(hdr, []) - await hass.async_block_till_done() - - assert len(zha_events) == 1 - assert zha_events[0].data[ATTR_COMMAND] == "up_open" - - # down command - hdr = make_zcl_header(1, global_command=False) - cluster.handle_message(hdr, []) - await hass.async_block_till_done() - - assert len(zha_events) == 2 - assert zha_events[1].data[ATTR_COMMAND] == "down_close" diff --git a/tests/components/zha/test_device.py b/tests/components/zha/test_device.py deleted file mode 100644 index 87acdc5fd1c..00000000000 --- a/tests/components/zha/test_device.py +++ /dev/null @@ -1,363 +0,0 @@ -"""Test ZHA device switch.""" - -from datetime import timedelta -import logging -import time -from unittest import mock -from unittest.mock import patch - -import pytest -import zigpy.profiles.zha -import zigpy.types -from zigpy.zcl.clusters import general -import zigpy.zdo.types as zdo_t - -from homeassistant.components.zha.core.const import ( - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY, - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS, -) -from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform -from homeassistant.core import HomeAssistant -import homeassistant.helpers.device_registry as dr -import homeassistant.util.dt as dt_util - -from .common import async_enable_traffic, make_zcl_header -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -from tests.common import async_fire_time_changed - - -@pytest.fixture(autouse=True) -def required_platforms_only(): - """Only set up the required platform and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.DEVICE_TRACKER, - Platform.SENSOR, - Platform.SELECT, - Platform.SWITCH, - Platform.BINARY_SENSOR, - ), - ): - yield - - -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - - def _dev(with_basic_cluster_handler: bool = True, **kwargs): - in_clusters = [general.OnOff.cluster_id] - if with_basic_cluster_handler: - in_clusters.append(general.Basic.cluster_id) - - endpoints = { - 3: { - SIG_EP_INPUT: in_clusters, - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock(endpoints, **kwargs) - - return _dev - - -@pytest.fixture -def zigpy_device_mains(zigpy_device_mock): - 
"""Device tracker zigpy device.""" - - def _dev(with_basic_cluster_handler: bool = True): - in_clusters = [general.OnOff.cluster_id] - if with_basic_cluster_handler: - in_clusters.append(general.Basic.cluster_id) - - endpoints = { - 3: { - SIG_EP_INPUT: in_clusters, - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" - ) - - return _dev - - -@pytest.fixture -def device_with_basic_cluster_handler(zigpy_device_mains): - """Return a ZHA device with a basic cluster handler present.""" - return zigpy_device_mains(with_basic_cluster_handler=True) - - -@pytest.fixture -def device_without_basic_cluster_handler(zigpy_device): - """Return a ZHA device without a basic cluster handler present.""" - return zigpy_device(with_basic_cluster_handler=False) - - -@pytest.fixture -async def ota_zha_device(zha_device_restored, zigpy_device_mock): - """ZHA device with OTA cluster fixture.""" - zigpy_dev = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: 0x1234, - } - }, - "00:11:22:33:44:55:66:77", - "test manufacturer", - "test model", - ) - - return await zha_device_restored(zigpy_dev) - - -def _send_time_changed(hass, seconds): - """Send a time changed event.""" - now = dt_util.utcnow() + timedelta(seconds=seconds) - async_fire_time_changed(hass, now) - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_success( - hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored -) -> None: - """Check device availability success on 1st try.""" - zha_device = await zha_device_restored(device_with_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - basic_ch = device_with_basic_cluster_handler.endpoints[3].basic - - basic_ch.read_attributes.reset_mock() - device_with_basic_cluster_handler.last_seen = None - assert zha_device.available is True - _send_time_changed(hass, zha_device.consider_unavailable_time + 2) - await hass.async_block_till_done() - assert zha_device.available is False - assert basic_ch.read_attributes.await_count == 0 - - device_with_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - _seens = [time.time(), device_with_basic_cluster_handler.last_seen] - - def _update_last_seen(*args, **kwargs): - device_with_basic_cluster_handler.last_seen = _seens.pop() - - basic_ch.read_attributes.side_effect = _update_last_seen - - # successfully ping zigpy device, but zha_device is not yet available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 1 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - # There was traffic from the device: pings, but not yet available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - # There was traffic from the device: don't try to ping, marked as available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert 
zha_device.available is True - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_unsuccessful( - hass: HomeAssistant, device_with_basic_cluster_handler, zha_device_restored -) -> None: - """Check device availability all tries fail.""" - - zha_device = await zha_device_restored(device_with_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - basic_ch = device_with_basic_cluster_handler.endpoints[3].basic - - assert zha_device.available is True - assert basic_ch.read_attributes.await_count == 0 - - device_with_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - - # unsuccessfully ping zigpy device, but zha_device is still available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 1 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is True - - # still no traffic, but zha_device is still available - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is True - - # not even trying to update, device is unavailable - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert basic_ch.read_attributes.await_count == 2 - assert basic_ch.read_attributes.await_args[0][0] == ["manufacturer"] - assert zha_device.available is False - - -@patch( - "homeassistant.components.zha.core.cluster_handlers.general.BasicClusterHandler.async_initialize", - new=mock.AsyncMock(), -) -async def test_check_available_no_basic_cluster_handler( - hass: HomeAssistant, - device_without_basic_cluster_handler, - zha_device_restored, - caplog: pytest.LogCaptureFixture, -) -> None: - """Check device availability for a device without basic cluster.""" - caplog.set_level(logging.DEBUG, logger="homeassistant.components.zha") - - zha_device = await zha_device_restored(device_without_basic_cluster_handler) - await async_enable_traffic(hass, [zha_device]) - - assert zha_device.available is True - - device_without_basic_cluster_handler.last_seen = ( - time.time() - zha_device.consider_unavailable_time - 2 - ) - - assert "does not have a mandatory basic cluster" not in caplog.text - _send_time_changed(hass, 91) - await hass.async_block_till_done() - assert zha_device.available is False - assert "does not have a mandatory basic cluster" in caplog.text - - -async def test_ota_sw_version( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, ota_zha_device -) -> None: - """Test device entry gets sw_version updated via OTA cluster handler.""" - - ota_ch = ota_zha_device._endpoints[1].client_cluster_handlers["1:0x0019"] - entry = device_registry.async_get(ota_zha_device.device_id) - assert entry.sw_version is None - - cluster = ota_ch.cluster - hdr = make_zcl_header(1, global_command=False) - sw_version = 0x2345 - cluster.handle_message(hdr, [1, 2, 3, sw_version, None]) - await hass.async_block_till_done() - entry = device_registry.async_get(ota_zha_device.device_id) - assert int(entry.sw_version, base=16) == sw_version - - -@pytest.mark.parametrize( - ("device", "last_seen_delta", "is_available"), - [ - ("zigpy_device", 0, True), - ( - "zigpy_device", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, - True, - ), - ( - "zigpy_device", - 
CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, - True, - ), - ( - "zigpy_device", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, - False, - ), - ("zigpy_device_mains", 0, True), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS - 2, - True, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_MAINS + 2, - False, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY - 2, - False, - ), - ( - "zigpy_device_mains", - CONF_DEFAULT_CONSIDER_UNAVAILABLE_BATTERY + 2, - False, - ), - ], -) -async def test_device_restore_availability( - hass: HomeAssistant, - request: pytest.FixtureRequest, - device, - last_seen_delta, - is_available, - zha_device_restored, -) -> None: - """Test initial availability for restored devices.""" - - zigpy_device = request.getfixturevalue(device)() - zha_device = await zha_device_restored( - zigpy_device, last_seen=time.time() - last_seen_delta - ) - entity_id = "switch.fakemanufacturer_fakemodel_switch" - - await hass.async_block_till_done() - # ensure the switch entity was created - assert hass.states.get(entity_id).state is not None - assert zha_device.available is is_available - if is_available: - assert hass.states.get(entity_id).state == STATE_OFF - else: - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - -async def test_device_is_active_coordinator( - hass: HomeAssistant, zha_device_joined, zigpy_device -) -> None: - """Test that the current coordinator is uniquely detected.""" - - current_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:11", nwk=0x0000) - current_coord_dev.node_desc = current_coord_dev.node_desc.replace( - logical_type=zdo_t.LogicalType.Coordinator - ) - - old_coord_dev = zigpy_device(ieee="aa:bb:cc:dd:ee:ff:00:12", nwk=0x0000) - old_coord_dev.node_desc = old_coord_dev.node_desc.replace( - logical_type=zdo_t.LogicalType.Coordinator - ) - - # The two coordinators have different IEEE addresses - assert current_coord_dev.ieee != old_coord_dev.ieee - - current_coordinator = await zha_device_joined(current_coord_dev) - stale_coordinator = await zha_device_joined(old_coord_dev) - - # Ensure the current ApplicationController's IEEE matches our coordinator's - current_coordinator.gateway.application_controller.state.node_info.ieee = ( - current_coord_dev.ieee - ) - - assert current_coordinator.is_active_coordinator - assert not stale_coordinator.is_active_coordinator diff --git a/tests/components/zha/test_device_action.py b/tests/components/zha/test_device_action.py index 13e9d789191..8bee821654d 100644 --- a/tests/components/zha/test_device_action.py +++ b/tests/components/zha/test_device_action.py @@ -1,23 +1,23 @@ """The test for ZHA device automation actions.""" -from unittest.mock import call, patch +from unittest.mock import patch import pytest from pytest_unordered import unordered -from zhaquirks.inovelli.VZM31SN import InovelliVZM31SNv11 -import zigpy.profiles.zha +from zigpy.profiles import zha from zigpy.zcl.clusters import general, security import zigpy.zcl.foundation as zcl_f from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.zha import DOMAIN +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from .conftest import SIG_EP_INPUT, 
SIG_EP_OUTPUT, SIG_EP_TYPE +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from tests.common import async_get_device_automations, async_mock_service @@ -52,66 +52,37 @@ def required_platforms_only(): yield -@pytest.fixture -async def device_ias(hass, zigpy_device_mock, zha_device_joined_restored): - """IAS device fixture.""" +async def test_get_actions( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + setup_zha, + zigpy_device_mock, +) -> None: + """Test we get the expected actions from a ZHA device.""" - clusters = [general.Basic, security.IasZone, security.IasWd] - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [c.cluster_id for c in clusters], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - -@pytest.fixture -async def device_inovelli(hass, zigpy_device_mock, zha_device_joined): - """Inovelli device fixture.""" + await setup_zha() + gateway = get_zha_gateway(hass) zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - 0xFC31, + security.IasZone.cluster_id, + security.IasWd.cluster_id, ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DIMMABLE_LIGHT, + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, + SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ieee="00:1d:8f:08:0c:90:69:6b", - manufacturer="Inovelli", - model="VZM31-SN", - quirk=InovelliVZM31SNv11, + } ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - -async def test_get_actions( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - device_ias, -) -> None: - """Test we get the expected actions from a ZHA device.""" - - ieee_address = str(device_ias[0].ieee) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + ieee_address = str(zigpy_device.ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) siren_level_select = entity_registry.async_get( @@ -168,112 +139,40 @@ async def test_get_actions( assert actions == unordered(expected_actions) -async def test_get_inovelli_actions( +async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - entity_registry: er.EntityRegistry, - device_inovelli, -) -> None: - """Test we get the expected actions from a ZHA device.""" - - inovelli_ieee_address = str(device_inovelli[0].ieee) - inovelli_reg_device = device_registry.async_get_device( - identifiers={(DOMAIN, inovelli_ieee_address)} - ) - inovelli_button = entity_registry.async_get("button.inovelli_vzm31_sn_identify") - inovelli_light = entity_registry.async_get("light.inovelli_vzm31_sn_light") - - actions = await async_get_device_automations( - hass, DeviceAutomationType.ACTION, inovelli_reg_device.id - ) - - expected_actions = [ - { - "device_id": inovelli_reg_device.id, - "domain": DOMAIN, - "metadata": {}, - "type": "issue_all_led_effect", - }, - { - "device_id": inovelli_reg_device.id, - "domain": 
DOMAIN, - "metadata": {}, - "type": "issue_individual_led_effect", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.BUTTON, - "entity_id": inovelli_button.id, - "metadata": {"secondary": True}, - "type": "press", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "turn_off", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "turn_on", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "toggle", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "brightness_increase", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "brightness_decrease", - }, - { - "device_id": inovelli_reg_device.id, - "domain": Platform.LIGHT, - "entity_id": inovelli_light.id, - "metadata": {"secondary": False}, - "type": "flash", - }, - ] - - assert actions == unordered(expected_actions) - - -async def test_action( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, device_ias, device_inovelli + setup_zha, + zigpy_device_mock, ) -> None: """Test for executing a ZHA device action.""" - zigpy_device, zha_device = device_ias - inovelli_zigpy_device, inovelli_zha_device = device_inovelli + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + security.IasZone.cluster_id, + security.IasWd.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) zigpy_device.device_automation_triggers = { (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE} } - ieee_address = str(zha_device.ieee) - inovelli_ieee_address = str(inovelli_zha_device.ieee) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + ieee_address = str(zigpy_device.ieee) reg_device = device_registry.async_get_device(identifiers={(DOMAIN, ieee_address)}) - inovelli_reg_device = device_registry.async_get_device( - identifiers={(DOMAIN, inovelli_ieee_address)} - ) - - cluster = inovelli_zigpy_device.endpoints[1].in_clusters[0xFC31] with patch( "zigpy.zcl.Cluster.request", @@ -298,25 +197,6 @@ async def test_action( "device_id": reg_device.id, "type": "warn", }, - { - "domain": DOMAIN, - "device_id": inovelli_reg_device.id, - "type": "issue_all_led_effect", - "effect_type": "Open_Close", - "duration": 5, - "level": 10, - "color": 41, - }, - { - "domain": DOMAIN, - "device_id": inovelli_reg_device.id, - "type": "issue_individual_led_effect", - "effect_type": "Falling", - "led_number": 1, - "duration": 5, - "level": 10, - "color": 41, - }, ], } ] @@ -326,7 +206,11 @@ async def test_action( await hass.async_block_till_done() calls = async_mock_service(hass, DOMAIN, "warning_device_warn") - cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] + cluster_handler = ( + gateway.get_device(zigpy_device.ieee) + .endpoints[1] + .client_cluster_handlers["1:0x0006"] + ) cluster_handler.zha_send_event(COMMAND_SINGLE, []) 
await hass.async_block_till_done() @@ -335,44 +219,41 @@ async def test_action( assert calls[0].service == "warning_device_warn" assert calls[0].data["ieee"] == ieee_address - assert len(cluster.request.mock_calls) == 2 - assert ( - call( - False, - cluster.commands_by_name["led_effect"].id, - cluster.commands_by_name["led_effect"].schema, - 6, - 41, - 10, - 5, - expect_reply=False, - manufacturer=4151, - tsn=None, - ) - in cluster.request.call_args_list - ) - assert ( - call( - False, - cluster.commands_by_name["individual_led_effect"].id, - cluster.commands_by_name["individual_led_effect"].schema, - 1, - 6, - 41, - 10, - 5, - expect_reply=False, - manufacturer=4151, - tsn=None, - ) - in cluster.request.call_args_list - ) - -async def test_invalid_zha_event_type(hass: HomeAssistant, device_ias) -> None: +async def test_invalid_zha_event_type( + hass: HomeAssistant, setup_zha, zigpy_device_mock +) -> None: """Test that unexpected types are not passed to `zha_send_event`.""" - zigpy_device, zha_device = device_ias - cluster_handler = zha_device._endpoints[1].client_cluster_handlers["1:0x0006"] + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + security.IasZone.cluster_id, + security.IasWd.cluster_id, + ], + SIG_EP_OUTPUT: [general.OnOff.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + zigpy_device.device_automation_triggers = { + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE} + } + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + cluster_handler = ( + gateway.get_device(zigpy_device.ieee) + .endpoints[1] + .client_cluster_handlers["1:0x0006"] + ) # `zha_send_event` accepts only zigpy responses, lists, and dicts with pytest.raises(TypeError): diff --git a/tests/components/zha/test_device_tracker.py b/tests/components/zha/test_device_tracker.py index 64360c8b2ff..ae96de44f17 100644 --- a/tests/components/zha/test_device_tracker.py +++ b/tests/components/zha/test_device_tracker.py @@ -5,23 +5,22 @@ import time from unittest.mock import patch import pytest -import zigpy.profiles.zha +from zha.application.registries import SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE +from zigpy.profiles import zha from zigpy.zcl.clusters import general from homeassistant.components.device_tracker import SourceType -from homeassistant.components.zha.core.registries import ( - SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_HOME, STATE_NOT_HOME, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from tests.common import async_fire_time_changed @@ -44,49 +43,41 @@ def device_tracker_platforms_only(): yield -@pytest.fixture -def zigpy_device_dt(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - 
general.PowerConfiguration.cluster_id, - general.Identify.cluster_id, - general.PollControl.cluster_id, - general.BinaryInput.cluster_id, - ], - SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], - SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - } - return zigpy_device_mock(endpoints) - - async def test_device_tracker( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_dt + hass: HomeAssistant, setup_zha, zigpy_device_mock ) -> None: """Test ZHA device tracker platform.""" - zha_device = await zha_device_joined_restored(zigpy_device_dt) - cluster = zigpy_device_dt.endpoints.get(1).power - entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.PowerConfiguration.cluster_id, + general.Identify.cluster_id, + general.PollControl.cluster_id, + general.BinaryInput.cluster_id, + ], + SIG_EP_OUTPUT: [general.Identify.cluster_id, general.Ota.cluster_id], + SIG_EP_TYPE: SMARTTHINGS_ARRIVAL_SENSOR_DEVICE_TYPE, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].power assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_NOT_HOME - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the device tracker was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - zigpy_device_dt.last_seen = time.time() - 120 - next_update = dt_util.utcnow() + timedelta(seconds=30) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - # test that the state has changed from unavailable to not home assert hass.states.get(entity_id).state == STATE_NOT_HOME @@ -95,7 +86,7 @@ async def test_device_tracker( hass, cluster, {0x0000: 0, 0x0020: 23, 0x0021: 200, 0x0001: 2} ) - zigpy_device_dt.last_seen = time.time() + 10 + zigpy_device.last_seen = time.time() + 10 next_update = dt_util.utcnow() + timedelta(seconds=30) async_fire_time_changed(hass, next_update) await hass.async_block_till_done() @@ -107,7 +98,3 @@ async def test_device_tracker( assert entity.is_connected is True assert entity.source_type == SourceType.ROUTER assert entity.battery_level == 100 - - # test adding device tracker to the network and HA - await async_test_rejoin(hass, zigpy_device_dt, [cluster], (2,)) - assert hass.states.get(entity_id).state == STATE_HOME diff --git a/tests/components/zha/test_device_trigger.py b/tests/components/zha/test_device_trigger.py index b43392af61a..09b2d155547 100644 --- a/tests/components/zha/test_device_trigger.py +++ b/tests/components/zha/test_device_trigger.py @@ -1,35 +1,26 @@ """ZHA device automation trigger tests.""" -from datetime import timedelta -import time from unittest.mock import patch import pytest +from zha.application.const import ATTR_ENDPOINT_ID from zigpy.application import ControllerApplication 
+from zigpy.device import Device as ZigpyDevice import zigpy.profiles.zha -from zigpy.zcl.clusters import general +import zigpy.types from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.device_automation.exceptions import ( InvalidDeviceAutomationConfig, ) -from homeassistant.components.zha.core.const import ATTR_ENDPOINT_ID +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import Platform from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -import homeassistant.util.dt as dt_util -from .common import async_enable_traffic -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_get_device_automations, - async_mock_service, -) +from tests.common import MockConfigEntry, async_get_device_automations @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -51,16 +42,6 @@ LONG_PRESS = "remote_button_long_press" LONG_RELEASE = "remote_button_long_release" -SWITCH_SIGNATURE = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [general.OnOff.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } -} - - @pytest.fixture(autouse=True) def sensor_platforms_only(): """Only set up the sensor platform and required base platforms to speed up tests.""" @@ -75,31 +56,21 @@ def _same_lists(list_a, list_b): return all(item in list_b for item in list_a) -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - -@pytest.fixture -async def mock_devices(hass, zigpy_device_mock, zha_device_joined_restored): - """IAS device fixture.""" - - zigpy_device = zigpy_device_mock(SWITCH_SIGNATURE) - - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device - - async def test_triggers( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_devices + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + setup_zha, ) -> None: """Test ZHA device triggers.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -108,9 +79,13 @@ async def test_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -170,14 +145,26 @@ async def test_triggers( async def test_no_triggers( - hass: HomeAssistant, 
device_registry: dr.DeviceRegistry, mock_devices + hass: HomeAssistant, device_registry: dr.DeviceRegistry, setup_zha ) -> None: """Test ZHA device with no triggers.""" + await setup_zha() + gateway = get_zha_gateway(hass) - _, zha_device = mock_devices - ieee_address = str(zha_device.ieee) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = {} - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) triggers = await async_get_device_automations( hass, DeviceAutomationType.TRIGGER, reg_device.id @@ -197,12 +184,21 @@ async def test_no_triggers( async def test_if_fires_on_event( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], + service_calls: list[ServiceCall], + setup_zha, ) -> None: """Test for remote triggers firing.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + ep = zigpy_device.add_endpoint(1) + ep.add_output_cluster(0x0006) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, @@ -212,8 +208,13 @@ async def test_if_fires_on_event( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) assert await async_setup_component( hass, @@ -239,35 +240,46 @@ async def test_if_fires_on_event( await hass.async_block_till_done() - cluster_handler = zha_device.endpoints[1].client_cluster_handlers["1:0x0006"] - cluster_handler.zha_send_event(COMMAND_SINGLE, []) + zha_device.emit_zha_event( + { + "unique_id": f"{zha_device.ieee}:1:0x0006", + "endpoint_id": 1, + "cluster_id": 0x0006, + "command": COMMAND_SINGLE, + "args": [], + "params": {}, + }, + ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" async def test_device_offline_fires( hass: HomeAssistant, - zigpy_device_mock, - zha_device_restored, - calls: list[ServiceCall], + device_registry: dr.DeviceRegistry, + service_calls: list[ServiceCall], + setup_zha, ) -> None: """Test for device offline triggers firing.""" - zigpy_device = zigpy_device_mock( - { - 1: { - "in_clusters": [general.Basic.cluster_id], - "out_clusters": [general.OnOff.cluster_id], - "device_type": 0, - } - } + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, ) - zha_device = await zha_device_restored(zigpy_device, 
last_seen=time.time()) - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) assert await async_setup_component( hass, @@ -276,7 +288,7 @@ async def test_device_offline_fires( automation.DOMAIN: [ { "trigger": { - "device_id": zha_device.device_id, + "device_id": reg_device.id, "domain": "zha", "platform": "device", "type": "device_offline", @@ -291,44 +303,39 @@ async def test_device_offline_fires( }, ) - await hass.async_block_till_done() assert zha_device.available is True - - zigpy_device.last_seen = time.time() - zha_device.consider_unavailable_time - 2 - - # there are 3 checkins to perform before marking the device unavailable - future = dt_util.utcnow() + timedelta(seconds=90) - async_fire_time_changed(hass, future) + zha_device.available = False + zha_device.emit_zha_event({"device_event_type": "device_offline"}) await hass.async_block_till_done() - future = dt_util.utcnow() + timedelta(seconds=90) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - future = dt_util.utcnow() + timedelta( - seconds=zha_device.consider_unavailable_time + 100 - ) - async_fire_time_changed(hass, future) - await hass.async_block_till_done() - - assert zha_device.available is False - assert len(calls) == 1 - assert calls[0].data["message"] == "service called" + assert len(service_calls) == 1 + assert service_calls[0].data["message"] == "service called" async def test_exception_no_triggers( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test for exception when validating device triggers.""" - _, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) - ieee_address = str(zha_device.ieee) - reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) await async_setup_component( hass, @@ -361,14 +368,19 @@ async def test_exception_no_triggers( async def test_exception_bad_trigger( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - mock_devices, - calls: list[ServiceCall], caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test for exception when validating device triggers.""" - zigpy_device, zha_device = mock_devices + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) zigpy_device.device_automation_triggers = { (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, @@ -377,8 +389,13 @@ async def test_exception_bad_trigger( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) - reg_device = 
device_registry.async_get_device(identifiers={("zha", ieee_address)}) + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) + + reg_device = device_registry.async_get_device( + identifiers={("zha", str(zha_device.ieee))} + ) await async_setup_component( hass, @@ -412,23 +429,37 @@ async def test_validate_trigger_config_missing_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - zigpy_device_mock, - mock_zigpy_connect: ControllerApplication, - zha_device_joined, caplog: pytest.LogCaptureFixture, + setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - # Join a device - switch = zigpy_device_mock(SWITCH_SIGNATURE) - await zha_device_joined(switch) + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = { + (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, + (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, + (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, + (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, + } + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(switch.ieee))} + identifiers={("zha", str(zha_device.ieee))} ) assert await async_setup_component( @@ -465,16 +496,32 @@ async def test_validate_trigger_config_unloaded_bad_info( hass: HomeAssistant, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - zigpy_device_mock, - mock_zigpy_connect: ControllerApplication, - zha_device_joined, caplog: pytest.LogCaptureFixture, + zigpy_app_controller: ControllerApplication, + setup_zha, ) -> None: """Test device triggers referring to a missing device.""" - # Join a device - switch = zigpy_device_mock(SWITCH_SIGNATURE) - await zha_device_joined(switch) + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device = ZigpyDevice( + application=gateway.application_controller, + ieee=zigpy.types.EUI64.convert("aa:bb:cc:dd:11:22:33:44"), + nwk=0x1234, + ) + zigpy_device.device_automation_triggers = { + (SHAKEN, SHAKEN): {COMMAND: COMMAND_SHAKE}, + (DOUBLE_PRESS, DOUBLE_PRESS): {COMMAND: COMMAND_DOUBLE}, + (SHORT_PRESS, SHORT_PRESS): {COMMAND: COMMAND_SINGLE}, + (LONG_PRESS, LONG_PRESS): {COMMAND: COMMAND_HOLD}, + (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, + } + + zigpy_app_controller.devices[zigpy_device.ieee] = zigpy_device + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zha_device.device) + await hass.async_block_till_done(wait_background_tasks=True) # After we unload the config entry, trigger info was not cached on startup, nor can # it be pulled from the current device, making it impossible to validate triggers @@ -482,11 +529,12 @@ async def test_validate_trigger_config_unloaded_bad_info( # Reload ZHA to persist the device info 
in the cache await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + await hass.config_entries.async_unload(config_entry.entry_id) reg_device = device_registry.async_get_device( - identifiers={("zha", str(switch.ieee))} + identifiers={("zha", str(zha_device.ieee))} ) assert await async_setup_component( diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index 4bb30a5fc8c..bbdc6271207 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -7,9 +7,13 @@ from zigpy.profiles import zha from zigpy.zcl.clusters import security from homeassistant.components.diagnostics import REDACTED -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.components.zha.core.helpers import get_zha_gateway from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -41,33 +45,35 @@ def required_platforms_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - async def test_diagnostics_for_config_entry( hass: HomeAssistant, hass_client: ClientSessionGenerator, config_entry: MockConfigEntry, - zha_device_joined, - zigpy_device, + setup_zha, + zigpy_device_mock, ) -> None: """Test diagnostics for config entry.""" - await zha_device_joined(zigpy_device) + await setup_zha() gateway = get_zha_gateway(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + scan = {c: c for c in range(11, 26 + 1)} with patch.object(gateway.application_controller, "energy_scan", return_value=scan): @@ -106,19 +112,40 @@ async def test_diagnostics_for_device( hass_client: ClientSessionGenerator, device_registry: dr.DeviceRegistry, config_entry: MockConfigEntry, - zha_device_joined, - zigpy_device, + setup_zha, + zigpy_device_mock, ) -> None: """Test diagnostics for device.""" - zha_device: ZHADevice = await zha_device_joined(zigpy_device) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id, security.IasZone.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", + ) + + 
gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) # add unknown unsupported attribute with id and name - zha_device.device.endpoints[1].in_clusters[ + zha_device_proxy.device.device.endpoints[1].in_clusters[ security.IasAce.cluster_id ].unsupported_attributes.update({0x1000, "unknown_attribute_name"}) # add known unsupported attributes with id and name - zha_device.device.endpoints[1].in_clusters[ + zha_device_proxy.device.device.endpoints[1].in_clusters[ security.IasZone.cluster_id ].unsupported_attributes.update( { @@ -128,14 +155,14 @@ async def test_diagnostics_for_device( ) device = device_registry.async_get_device( - identifiers={("zha", str(zha_device.ieee))} + identifiers={("zha", str(zha_device_proxy.device.ieee))} ) assert device diagnostics_data = await get_diagnostics_for_device( hass, hass_client, config_entry, device ) assert diagnostics_data - device_info: dict = zha_device.zha_device_info + device_info: dict = zha_device_proxy.zha_device_info for key in device_info: assert key in diagnostics_data if key not in KEYS_TO_REDACT: diff --git a/tests/components/zha/test_discover.py b/tests/components/zha/test_discover.py deleted file mode 100644 index c59acc3395f..00000000000 --- a/tests/components/zha/test_discover.py +++ /dev/null @@ -1,1100 +0,0 @@ -"""Test ZHA device discovery.""" - -from collections.abc import Callable -import enum -import itertools -import re -from typing import Any -from unittest import mock -from unittest.mock import AsyncMock, Mock, patch - -import pytest -from zhaquirks.ikea import PowerConfig1CRCluster, ScenesCluster -from zhaquirks.xiaomi import ( - BasicCluster, - LocalIlluminanceMeasurementCluster, - XiaomiPowerConfigurationPercent, -) -from zhaquirks.xiaomi.aqara.driver_curtain_e1 import ( - WindowCoveringE1, - XiaomiAqaraDriverE1, -) -from zigpy.const import SIG_ENDPOINTS, SIG_MANUFACTURER, SIG_MODEL, SIG_NODE_DESC -import zigpy.profiles.zha -import zigpy.quirks -from zigpy.quirks.v2 import ( - BinarySensorMetadata, - EntityMetadata, - EntityType, - NumberMetadata, - QuirksV2RegistryEntry, - ZCLCommandButtonMetadata, - ZCLSensorMetadata, - add_to_registry_v2, -) -from zigpy.quirks.v2.homeassistant import UnitOfTime -import zigpy.types -from zigpy.zcl import ClusterType -import zigpy.zcl.clusters.closures -import zigpy.zcl.clusters.general -import zigpy.zcl.clusters.security -import zigpy.zcl.foundation as zcl_f - -from homeassistant.components.zha.core import cluster_handlers -import homeassistant.components.zha.core.const as zha_const -from homeassistant.components.zha.core.device import ZHADevice -import homeassistant.components.zha.core.discovery as disc -from homeassistant.components.zha.core.endpoint import Endpoint -from homeassistant.components.zha.core.helpers import get_zha_gateway -import homeassistant.components.zha.core.registries as zha_regs -from homeassistant.const import STATE_OFF, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_platform import EntityPlatform -from homeassistant.util.json import load_json - -from .common import find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from .zha_devices_list import ( - DEV_SIG_ATTRIBUTES, - DEV_SIG_CLUSTER_HANDLERS, - 
DEV_SIG_ENT_MAP, - DEV_SIG_ENT_MAP_CLASS, - DEV_SIG_ENT_MAP_ID, - DEV_SIG_EVT_CLUSTER_HANDLERS, - DEVICES, -) - -NO_TAIL_ID = re.compile("_\\d$") -UNIQUE_ID_HD = re.compile(r"^(([\da-fA-F]{2}:){7}[\da-fA-F]{2}-\d{1,3})", re.X) - -IGNORE_SUFFIXES = [ - zigpy.zcl.clusters.general.OnOff.StartUpOnOff.__name__, - "on_off_transition_time", - "on_level", - "on_transition_time", - "off_transition_time", - "default_move_rate", - "start_up_current_level", - "counter", -] - - -def contains_ignored_suffix(unique_id: str) -> bool: - """Return true if the unique_id ends with an ignored suffix.""" - return any(suffix.lower() in unique_id.lower() for suffix in IGNORE_SUFFIXES) - - -@patch( - "zigpy.zcl.clusters.general.Identify.request", - new=AsyncMock(return_value=[mock.sentinel.data, zcl_f.Status.SUCCESS]), -) -# We do this here because we are testing ZHA discovery logic. Point being we want to ensure that -# all discovered entities are dispatched for creation. In order to test this we need the entities -# added to HA. So we ensure that they are all enabled even though they won't necessarily be in reality -# at runtime -@patch( - "homeassistant.components.zha.entity.ZhaEntity.entity_registry_enabled_default", - new=Mock(return_value=True), -) -@pytest.mark.parametrize("device", DEVICES) -async def test_devices( - device, - hass_disable_services, - zigpy_device_mock, - zha_device_joined_restored, -) -> None: - """Test device discovery.""" - zigpy_device = zigpy_device_mock( - endpoints=device[SIG_ENDPOINTS], - ieee="00:11:22:33:44:55:66:77", - manufacturer=device[SIG_MANUFACTURER], - model=device[SIG_MODEL], - node_descriptor=device[SIG_NODE_DESC], - attributes=device.get(DEV_SIG_ATTRIBUTES), - patch_cluster=False, - ) - - cluster_identify = _get_first_identify_cluster(zigpy_device) - if cluster_identify: - cluster_identify.request.reset_mock() - - with patch( - "homeassistant.helpers.entity_platform.EntityPlatform._async_schedule_add_entities_for_entry", - side_effect=EntityPlatform._async_schedule_add_entities_for_entry, - autospec=True, - ) as mock_add_entities: - zha_dev = await zha_device_joined_restored(zigpy_device) - await hass_disable_services.async_block_till_done() - - if cluster_identify: - # We only identify on join - should_identify = ( - zha_device_joined_restored.name == "zha_device_joined" - and not zigpy_device.skip_configuration - ) - - if should_identify: - assert cluster_identify.request.mock_calls == [ - mock.call( - False, - cluster_identify.commands_by_name["trigger_effect"].id, - cluster_identify.commands_by_name["trigger_effect"].schema, - effect_id=zigpy.zcl.clusters.general.Identify.EffectIdentifier.Okay, - effect_variant=( - zigpy.zcl.clusters.general.Identify.EffectVariant.Default - ), - expect_reply=True, - manufacturer=None, - tsn=None, - ) - ] - else: - assert cluster_identify.request.mock_calls == [] - - event_cluster_handlers = { - ch.id - for endpoint in zha_dev._endpoints.values() - for ch in endpoint.client_cluster_handlers.values() - } - assert event_cluster_handlers == set(device[DEV_SIG_EVT_CLUSTER_HANDLERS]) - - # Keep track of unhandled entities: they should always be ones we explicitly ignore - created_entities = { - entity.entity_id: entity - for mock_call in mock_add_entities.mock_calls - for entity in mock_call.args[1] - } - unhandled_entities = set(created_entities.keys()) - entity_registry = er.async_get(hass_disable_services) - - for (platform, unique_id), ent_info in device[DEV_SIG_ENT_MAP].items(): - no_tail_id = NO_TAIL_ID.sub("", 
ent_info[DEV_SIG_ENT_MAP_ID]) - ha_entity_id = entity_registry.async_get_entity_id(platform, "zha", unique_id) - message1 = f"No entity found for platform[{platform}] unique_id[{unique_id}]" - message2 = f"no_tail_id[{no_tail_id}] with entity_id[{ha_entity_id}]" - assert ha_entity_id is not None, f"{message1} {message2}" - assert ha_entity_id.startswith(no_tail_id) - - entity = created_entities[ha_entity_id] - unhandled_entities.remove(ha_entity_id) - - assert entity.platform.domain == platform - assert type(entity).__name__ == ent_info[DEV_SIG_ENT_MAP_CLASS] - # unique_id used for discover is the same for "multi entities" - assert unique_id == entity.unique_id - assert {ch.name for ch in entity.cluster_handlers.values()} == set( - ent_info[DEV_SIG_CLUSTER_HANDLERS] - ) - - # All unhandled entities should be ones we explicitly ignore - for entity_id in unhandled_entities: - domain = entity_id.split(".")[0] - assert domain in zha_const.PLATFORMS - assert contains_ignored_suffix(entity_id) - - -def _get_first_identify_cluster(zigpy_device): - for endpoint in list(zigpy_device.endpoints.values())[1:]: - if hasattr(endpoint, "identify"): - return endpoint.identify - - -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_device_type" -) -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.discover_by_cluster_id" -) -def test_discover_entities(m1, m2) -> None: - """Test discover endpoint class method.""" - endpoint = mock.MagicMock() - disc.PROBE.discover_entities(endpoint) - assert m1.call_count == 1 - assert m1.call_args[0][0] is endpoint - assert m2.call_count == 1 - assert m2.call_args[0][0] is endpoint - - -@pytest.mark.parametrize( - ("device_type", "platform", "hit"), - [ - (zigpy.profiles.zha.DeviceType.ON_OFF_LIGHT, Platform.LIGHT, True), - (zigpy.profiles.zha.DeviceType.ON_OFF_BALLAST, Platform.SWITCH, True), - (zigpy.profiles.zha.DeviceType.SMART_PLUG, Platform.SWITCH, True), - (0xFFFF, None, False), - ], -) -def test_discover_by_device_type(device_type, platform, hit) -> None: - """Test entity discovery by device type.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = device_type - type(endpoint).zigpy_endpoint = ep_mock - - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - with mock.patch( - "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ): - disc.PROBE.discover_by_device_type(endpoint) - if hit: - assert get_entity_mock.call_count == 1 - assert endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == platform - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - - -def test_discover_by_device_type_override() -> None: - """Test entity discovery by device type overriding.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = 0x0100 - type(endpoint).zigpy_endpoint = ep_mock - - overrides = {endpoint.unique_id: {"type": Platform.SWITCH}} - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - with ( - mock.patch( - 
"homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ), - mock.patch.dict(disc.PROBE._device_configs, overrides, clear=True), - ): - disc.PROBE.discover_by_device_type(endpoint) - assert get_entity_mock.call_count == 1 - assert endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - - -def test_discover_probe_single_cluster() -> None: - """Test entity discovery by single cluster.""" - - endpoint = mock.MagicMock(spec_set=Endpoint) - ep_mock = mock.PropertyMock() - ep_mock.return_value.profile_id = 0x0104 - ep_mock.return_value.device_type = 0x0100 - type(endpoint).zigpy_endpoint = ep_mock - - get_entity_mock = mock.MagicMock( - return_value=(mock.sentinel.entity_cls, mock.sentinel.claimed) - ) - cluster_handler_mock = mock.MagicMock(spec_set=cluster_handlers.ClusterHandler) - with mock.patch( - "homeassistant.components.zha.core.registries.ZHA_ENTITIES.get_entity", - get_entity_mock, - ): - disc.PROBE.probe_single_cluster(Platform.SWITCH, cluster_handler_mock, endpoint) - - assert get_entity_mock.call_count == 1 - assert endpoint.claim_cluster_handlers.call_count == 1 - assert endpoint.claim_cluster_handlers.call_args[0][0] is mock.sentinel.claimed - assert endpoint.async_new_entity.call_count == 1 - assert endpoint.async_new_entity.call_args[0][0] == Platform.SWITCH - assert endpoint.async_new_entity.call_args[0][1] == mock.sentinel.entity_cls - assert endpoint.async_new_entity.call_args[0][3] == mock.sentinel.claimed - - -@pytest.mark.parametrize("device_info", DEVICES) -async def test_discover_endpoint( - device_info: dict[str, Any], - zha_device_mock: Callable[..., ZHADevice], - hass: HomeAssistant, -) -> None: - """Test device discovery.""" - - with mock.patch( - "homeassistant.components.zha.core.endpoint.Endpoint.async_new_entity" - ) as new_ent: - device = zha_device_mock( - device_info[SIG_ENDPOINTS], - manufacturer=device_info[SIG_MANUFACTURER], - model=device_info[SIG_MODEL], - node_desc=device_info[SIG_NODE_DESC], - patch_cluster=True, - ) - - assert device_info[DEV_SIG_EVT_CLUSTER_HANDLERS] == sorted( - ch.id - for endpoint in device._endpoints.values() - for ch in endpoint.client_cluster_handlers.values() - ) - - # build a dict of entity_class -> (platform, unique_id, cluster_handlers) tuple - ha_ent_info = {} - for call in new_ent.call_args_list: - platform, entity_cls, unique_id, cluster_handlers = call[0] - if not contains_ignored_suffix(unique_id): - unique_id_head = UNIQUE_ID_HD.match(unique_id).group( - 0 - ) # ieee + endpoint_id - ha_ent_info[(unique_id_head, entity_cls.__name__)] = ( - platform, - unique_id, - cluster_handlers, - ) - - for platform_id, ent_info in device_info[DEV_SIG_ENT_MAP].items(): - platform, unique_id = platform_id - - test_ent_class = ent_info[DEV_SIG_ENT_MAP_CLASS] - test_unique_id_head = UNIQUE_ID_HD.match(unique_id).group(0) - assert (test_unique_id_head, test_ent_class) in ha_ent_info - - entity_platform, entity_unique_id, entity_cluster_handlers = ha_ent_info[ - (test_unique_id_head, test_ent_class) - ] - assert platform is entity_platform.value - # unique_id used for discover is the same for "multi entities" - assert unique_id.startswith(entity_unique_id) - assert {ch.name for ch in entity_cluster_handlers} == set( - 
ent_info[DEV_SIG_CLUSTER_HANDLERS] - ) - - device.async_cleanup_handles() - - -def _ch_mock(cluster): - """Return mock of a cluster_handler with a cluster.""" - cluster_handler = mock.MagicMock() - type(cluster_handler).cluster = mock.PropertyMock( - return_value=cluster(mock.MagicMock()) - ) - return cluster_handler - - -@mock.patch( - ( - "homeassistant.components.zha.core.discovery.ProbeEndpoint" - ".handle_on_off_output_cluster_exception" - ), - new=mock.MagicMock(), -) -@mock.patch( - "homeassistant.components.zha.core.discovery.ProbeEndpoint.probe_single_cluster" -) -def _test_single_input_cluster_device_class(probe_mock): - """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" - - door_ch = _ch_mock(zigpy.zcl.clusters.closures.DoorLock) - cover_ch = _ch_mock(zigpy.zcl.clusters.closures.WindowCovering) - multistate_ch = _ch_mock(zigpy.zcl.clusters.general.MultistateInput) - - class QuirkedIAS(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.security.IasZone): - pass - - ias_ch = _ch_mock(QuirkedIAS) - - class _Analog(zigpy.quirks.CustomCluster, zigpy.zcl.clusters.general.AnalogInput): - pass - - analog_ch = _ch_mock(_Analog) - - endpoint = mock.MagicMock(spec_set=Endpoint) - endpoint.unclaimed_cluster_handlers.return_value = [ - door_ch, - cover_ch, - multistate_ch, - ias_ch, - ] - - disc.ProbeEndpoint().discover_by_cluster_id(endpoint) - assert probe_mock.call_count == len(endpoint.unclaimed_cluster_handlers()) - probes = ( - (Platform.LOCK, door_ch), - (Platform.COVER, cover_ch), - (Platform.SENSOR, multistate_ch), - (Platform.BINARY_SENSOR, ias_ch), - (Platform.SENSOR, analog_ch), - ) - for call, details in zip(probe_mock.call_args_list, probes, strict=False): - platform, ch = details - assert call[0][0] == platform - assert call[0][1] == ch - - -def test_single_input_cluster_device_class_by_cluster_class() -> None: - """Test SINGLE_INPUT_CLUSTER_DEVICE_CLASS matching by cluster id or class.""" - mock_reg = { - zigpy.zcl.clusters.closures.DoorLock.cluster_id: Platform.LOCK, - zigpy.zcl.clusters.closures.WindowCovering.cluster_id: Platform.COVER, - zigpy.zcl.clusters.general.AnalogInput: Platform.SENSOR, - zigpy.zcl.clusters.general.MultistateInput: Platform.SENSOR, - zigpy.zcl.clusters.security.IasZone: Platform.BINARY_SENSOR, - } - - with mock.patch.dict( - zha_regs.SINGLE_INPUT_CLUSTER_DEVICE_CLASS, mock_reg, clear=True - ): - _test_single_input_cluster_device_class() - - -@pytest.mark.parametrize( - ("override", "entity_id"), - [ - (None, "light.manufacturer_model_light"), - ("switch", "switch.manufacturer_model_switch"), - ], -) -async def test_device_override( - hass_disable_services, zigpy_device_mock, setup_zha, override, entity_id -) -> None: - """Test device discovery override.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.COLOR_DIMMABLE_LIGHT, - "endpoint_id": 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - } - }, - "00:11:22:33:44:55:66:77", - "manufacturer", - "model", - patch_cluster=False, - ) - - if override is not None: - override = {"device_config": {"00:11:22:33:44:55:66:77-1": {"type": override}}} - - await setup_zha(override) - assert hass_disable_services.states.get(entity_id) is None - zha_gateway = get_zha_gateway(hass_disable_services) - await zha_gateway.async_device_initialized(zigpy_device) - await hass_disable_services.async_block_till_done() - assert hass_disable_services.states.get(entity_id) is not None - - -async def 
test_group_probe_cleanup_called( - hass_disable_services, setup_zha, config_entry -) -> None: - """Test cleanup happens when ZHA is unloaded.""" - await setup_zha() - disc.GROUP_PROBE.cleanup = mock.Mock(wraps=disc.GROUP_PROBE.cleanup) - await hass_disable_services.config_entries.async_unload(config_entry.entry_id) - await hass_disable_services.async_block_till_done() - disc.GROUP_PROBE.cleanup.assert_called() - - -async def test_quirks_v2_entity_discovery( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test quirks v2 discovery.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Scenes.cluster_id, - ], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer="Ikea of Sweden", - model="TRADFRI remote control", - ) - - ( - add_to_registry_v2( - "Ikea of Sweden", "TRADFRI remote control", zigpy.quirks._DEVICE_REGISTRY - ) - .replaces(PowerConfig1CRCluster) - .replaces(ScenesCluster, cluster_type=ClusterType.Client) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - ) - - zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) - zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { - "battery_voltage": 3, - "battery_percentage_remaining": 100, - } - update_attribute_cache(zigpy_device.endpoints[1].power) - zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, - } - update_attribute_cache(zigpy_device.endpoints[1].on_off) - - zha_device = await zha_device_joined(zigpy_device) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - ) - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state is not None - - -async def test_quirks_v2_entity_discovery_e1_curtain( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test quirks v2 discovery for e1 curtain motor.""" - aqara_E1_device = zigpy_device_mock( - { - 1: { - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.Basic.cluster_id, - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.general.Time.cluster_id, - WindowCoveringE1.cluster_id, - XiaomiAqaraDriverE1.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Identify.cluster_id, - zigpy.zcl.clusters.general.Time.cluster_id, - zigpy.zcl.clusters.general.Ota.cluster_id, - XiaomiAqaraDriverE1.cluster_id, - ], - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer="LUMI", - model="lumi.curtain.agl006", - ) - - class AqaraE1HookState(zigpy.types.enum8): - """Aqara hook state.""" - - Unlocked = 0x00 - Locked = 0x01 - Locking = 0x02 - Unlocking = 0x03 - - class FakeXiaomiAqaraDriverE1(XiaomiAqaraDriverE1): - """Fake XiaomiAqaraDriverE1 cluster.""" - - attributes = XiaomiAqaraDriverE1.attributes.copy() - attributes.update( - { - 0x9999: ("error_detected", zigpy.types.Bool, True), - } - ) - - ( - add_to_registry_v2("LUMI", "lumi.curtain.agl006") - 
.adds(LocalIlluminanceMeasurementCluster) - .replaces(BasicCluster) - .replaces(XiaomiPowerConfigurationPercent) - .replaces(WindowCoveringE1) - .replaces(FakeXiaomiAqaraDriverE1) - .removes(FakeXiaomiAqaraDriverE1, cluster_type=ClusterType.Client) - .enum( - BasicCluster.AttributeDefs.power_source.name, - BasicCluster.PowerSource, - BasicCluster.cluster_id, - entity_platform=Platform.SENSOR, - entity_type=EntityType.DIAGNOSTIC, - ) - .enum( - "hooks_state", - AqaraE1HookState, - FakeXiaomiAqaraDriverE1.cluster_id, - entity_platform=Platform.SENSOR, - entity_type=EntityType.DIAGNOSTIC, - ) - .binary_sensor( - "error_detected", - FakeXiaomiAqaraDriverE1.cluster_id, - translation_key="valve_alarm", - ) - ) - - aqara_E1_device = zigpy.quirks._DEVICE_REGISTRY.get_device(aqara_E1_device) - - aqara_E1_device.endpoints[1].opple_cluster.PLUGGED_ATTR_READS = { - "hand_open": 0, - "positions_stored": 0, - "hooks_lock": 0, - "hooks_state": AqaraE1HookState.Unlocked, - "light_level": 0, - "error_detected": 0, - } - update_attribute_cache(aqara_E1_device.endpoints[1].opple_cluster) - - aqara_E1_device.endpoints[1].basic.PLUGGED_ATTR_READS = { - BasicCluster.AttributeDefs.power_source.name: BasicCluster.PowerSource.Mains_single_phase, - } - update_attribute_cache(aqara_E1_device.endpoints[1].basic) - - WCAttrs = zigpy.zcl.clusters.closures.WindowCovering.AttributeDefs - WCT = zigpy.zcl.clusters.closures.WindowCovering.WindowCoveringType - WCCS = zigpy.zcl.clusters.closures.WindowCovering.ConfigStatus - aqara_E1_device.endpoints[1].window_covering.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 0, - WCAttrs.window_covering_type.name: WCT.Drapery, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(aqara_E1_device.endpoints[1].window_covering) - - zha_device = await zha_device_joined(aqara_E1_device) - - power_source_entity_id = find_entity_id( - Platform.SENSOR, - zha_device, - hass, - qualifier=BasicCluster.AttributeDefs.power_source.name, - ) - assert power_source_entity_id is not None - state = hass.states.get(power_source_entity_id) - assert state is not None - assert state.state == BasicCluster.PowerSource.Mains_single_phase.name - - hook_state_entity_id = find_entity_id( - Platform.SENSOR, - zha_device, - hass, - qualifier="hooks_state", - ) - assert hook_state_entity_id is not None - state = hass.states.get(hook_state_entity_id) - assert state is not None - assert state.state == AqaraE1HookState.Unlocked.name - - error_detected_entity_id = find_entity_id( - Platform.BINARY_SENSOR, - zha_device, - hass, - ) - assert error_detected_entity_id is not None - state = hass.states.get(error_detected_entity_id) - assert state is not None - assert state.state == STATE_OFF - - -def _get_test_device( - zigpy_device_mock, - manufacturer: str, - model: str, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry] - | None = None, -): - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - zigpy.zcl.clusters.general.PowerConfiguration.cluster_id, - zigpy.zcl.clusters.general.Groups.cluster_id, - zigpy.zcl.clusters.general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [ - zigpy.zcl.clusters.general.Scenes.cluster_id, - ], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.NON_COLOR_CONTROLLER, - } - }, - ieee="01:2d:6f:00:0a:90:69:e8", - manufacturer=manufacturer, - model=model, - ) - - v2_quirk = ( - add_to_registry_v2(manufacturer, model, zigpy.quirks._DEVICE_REGISTRY) - .replaces(PowerConfig1CRCluster) - 
.replaces(ScenesCluster, cluster_type=ClusterType.Client) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - endpoint_id=3, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - .number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.Time.cluster_id, - min_value=1, - max_value=100, - step=1, - unit=UnitOfTime.SECONDS, - multiplier=1, - translation_key="on_off_transition_time", - ) - .sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="analog_input", - ) - ) - - if augment_method: - v2_quirk = augment_method(v2_quirk) - - zigpy_device = zigpy.quirks._DEVICE_REGISTRY.get_device(zigpy_device) - zigpy_device.endpoints[1].power.PLUGGED_ATTR_READS = { - "battery_voltage": 3, - "battery_percentage_remaining": 100, - } - update_attribute_cache(zigpy_device.endpoints[1].power) - zigpy_device.endpoints[1].on_off.PLUGGED_ATTR_READS = { - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name: 3, - } - update_attribute_cache(zigpy_device.endpoints[1].on_off) - return zigpy_device - - -async def test_quirks_v2_entity_no_metadata( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test quirks v2 discovery skipped - no metadata.""" - - zigpy_device = _get_test_device( - zigpy_device_mock, "Ikea of Sweden2", "TRADFRI remote control2" - ) - setattr(zigpy_device, "_exposes_metadata", {}) - zha_device = await zha_device_joined(zigpy_device) - assert ( - f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not expose any quirks v2 entities" - in caplog.text - ) - - -async def test_quirks_v2_entity_discovery_errors( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test quirks v2 discovery skipped - errors.""" - - zigpy_device = _get_test_device( - zigpy_device_mock, "Ikea of Sweden3", "TRADFRI remote control3" - ) - zha_device = await zha_device_joined(zigpy_device) - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have an" - m2 = " endpoint with id: 3 - unable to create entity with cluster" - m3 = " details: (3, 6, )" - assert f"{m1}{m2}{m3}" in caplog.text - - time_cluster_id = zigpy.zcl.clusters.general.Time.cluster_id - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} does not have a" - m2 = f" cluster with id: {time_cluster_id} - unable to create entity with " - m3 = f"cluster details: (1, {time_cluster_id}, )" - assert f"{m1}{m2}{m3}" in caplog.text - - # fmt: off - entity_details = ( - "{'cluster_details': (1, 6, ), 'entity_metadata': " - "ZCLSensorMetadata(entity_platform=, " - "entity_type=, cluster_id=6, endpoint_id=1, " - "cluster_type=, initially_disabled=False, " - "attribute_initialized_from_cache=True, translation_key='analog_input', " - "attribute_name='off_wait_time', divisor=1, multiplier=1, " - "unit=None, device_class=None, state_class=None)}" - ) - # fmt: on - - m1 = f"Device: {zigpy_device.ieee!s}-{zha_device.name} has an entity with " - m2 = f"details: {entity_details} that does not have an entity class mapping - " - m3 = "unable to create entity" - assert f"{m1}{m2}{m3}" in caplog.text - - -DEVICE_CLASS_TYPES = [NumberMetadata, BinarySensorMetadata, ZCLSensorMetadata] - - -def 
validate_device_class_unit( - quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Ensure device class and unit are used correctly.""" - if ( - hasattr(entity_metadata, "unit") - and entity_metadata.unit is not None - and hasattr(entity_metadata, "device_class") - and entity_metadata.device_class is not None - ): - m1 = "device_class and unit are both set - unit: " - m2 = f"{entity_metadata.unit} device_class: " - m3 = f"{entity_metadata.device_class} for {platform.name} " - raise ValueError(f"{m1}{m2}{m3}{quirk}") - - -def validate_translation_keys( - quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Ensure translation keys exist for all v2 quirks.""" - if isinstance(entity_metadata, ZCLCommandButtonMetadata): - default_translation_key = entity_metadata.command_name - else: - default_translation_key = entity_metadata.attribute_name - translation_key = entity_metadata.translation_key or default_translation_key - - if ( - translation_key is not None - and translation_key not in translations["entity"][platform] - ): - raise ValueError( - f"Missing translation key: {translation_key} for {platform.name} {quirk}" - ) - - -def validate_translation_keys_device_class( - quirk: QuirksV2RegistryEntry, - entity_metadata: EntityMetadata, - platform: Platform, - translations: dict, -) -> None: - """Validate translation keys and device class usage.""" - if isinstance(entity_metadata, ZCLCommandButtonMetadata): - default_translation_key = entity_metadata.command_name - else: - default_translation_key = entity_metadata.attribute_name - translation_key = entity_metadata.translation_key or default_translation_key - - metadata_type = type(entity_metadata) - if metadata_type in DEVICE_CLASS_TYPES: - device_class = entity_metadata.device_class - if device_class is not None and translation_key is not None: - m1 = "translation_key and device_class are both set - translation_key: " - m2 = f"{translation_key} device_class: {device_class} for {platform.name} " - raise ValueError(f"{m1}{m2}{quirk}") - - -def validate_metadata(validator: Callable) -> None: - """Ensure v2 quirks metadata does not violate HA rules.""" - all_v2_quirks = itertools.chain.from_iterable( - zigpy.quirks._DEVICE_REGISTRY._registry_v2.values() - ) - translations = load_json("homeassistant/components/zha/strings.json") - for quirk in all_v2_quirks: - for entity_metadata in quirk.entity_metadata: - platform = Platform(entity_metadata.entity_platform.value) - validator(quirk, entity_metadata, platform, translations) - - -def bad_translation_key(v2_quirk: QuirksV2RegistryEntry) -> QuirksV2RegistryEntry: - """Introduce a bad translation key.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="missing_translation_key", - ) - - -def bad_device_class_unit_combination( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class and unit combination.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - unit="invalid", - device_class="invalid", - translation_key="analog_input", - ) - - -def bad_device_class_translation_key_usage( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - 
"""Introduce a bad device class and translation key combination.""" - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - entity_type=EntityType.CONFIG, - translation_key="invalid", - device_class="invalid", - ) - - -@pytest.mark.parametrize( - ("augment_method", "validate_method", "expected_exception_string"), - [ - ( - bad_translation_key, - validate_translation_keys, - "Missing translation key: missing_translation_key", - ), - ( - bad_device_class_unit_combination, - validate_device_class_unit, - "cannot have both unit and device_class", - ), - ( - bad_device_class_translation_key_usage, - validate_translation_keys_device_class, - "cannot have both a translation_key and a device_class", - ), - ], -) -async def test_quirks_v2_metadata_errors( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], - validate_method: Callable, - expected_exception_string: str, -) -> None: - """Ensure all v2 quirks translation keys exist.""" - - # no error yet - validate_metadata(validate_method) - - # ensure the error is caught and raised - try: - # introduce an error - zigpy_device = _get_test_device( - zigpy_device_mock, - "Ikea of Sweden4", - "TRADFRI remote control4", - augment_method=augment_method, - ) - await zha_device_joined(zigpy_device) - - validate_metadata(validate_method) - # if the device was created we remove it - # so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) - except ValueError: - # if the device was not created we remove it - # so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY._registry_v2.pop( - ( - "Ikea of Sweden4", - "TRADFRI remote control4", - ) - ) - with pytest.raises(ValueError, match=expected_exception_string): - raise - - -class BadDeviceClass(enum.Enum): - """Bad device class.""" - - BAD = "bad" - - -def bad_binary_sensor_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a binary sensor.""" - - return v2_quirk.binary_sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_off.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -def bad_sensor_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a sensor.""" - - return v2_quirk.sensor( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.off_wait_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -def bad_number_device_class( - v2_quirk: QuirksV2RegistryEntry, -) -> QuirksV2RegistryEntry: - """Introduce a bad device class on a number.""" - - return v2_quirk.number( - zigpy.zcl.clusters.general.OnOff.AttributeDefs.on_time.name, - zigpy.zcl.clusters.general.OnOff.cluster_id, - device_class=BadDeviceClass.BAD, - ) - - -ERROR_ROOT = "Quirks provided an invalid device class" - - -@pytest.mark.parametrize( - ("augment_method", "expected_exception_string"), - [ - ( - bad_binary_sensor_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform binary_sensor", - ), - ( - bad_sensor_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform sensor", - ), - ( - bad_number_device_class, - f"{ERROR_ROOT}: BadDeviceClass.BAD for platform number", - ), - ], -) -async def test_quirks_v2_metadata_bad_device_classes( - hass: HomeAssistant, - zigpy_device_mock, - 
zha_device_joined, - caplog: pytest.LogCaptureFixture, - augment_method: Callable[[QuirksV2RegistryEntry], QuirksV2RegistryEntry], - expected_exception_string: str, -) -> None: - """Test bad quirks v2 device classes.""" - - # introduce an error - zigpy_device = _get_test_device( - zigpy_device_mock, - "Ikea of Sweden4", - "TRADFRI remote control4", - augment_method=augment_method, - ) - await zha_device_joined(zigpy_device) - - assert expected_exception_string in caplog.text - - # remove the device so we don't pollute the rest of the tests - zigpy.quirks._DEVICE_REGISTRY.remove(zigpy_device) diff --git a/tests/components/zha/test_fan.py b/tests/components/zha/test_fan.py index 095f505876e..0105c569653 100644 --- a/tests/components/zha/test_fan.py +++ b/tests/components/zha/test_fan.py @@ -1,32 +1,25 @@ """Test ZHA fan.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import call, patch import pytest -import zhaquirks.ikea.starkvind -from zigpy.device import Device -from zigpy.exceptions import ZigbeeException +from zha.application.platforms.fan.const import PRESET_MODE_ON from zigpy.profiles import zha from zigpy.zcl.clusters import general, hvac -import zigpy.zcl.foundation as zcl_f from homeassistant.components.fan import ( ATTR_PERCENTAGE, - ATTR_PERCENTAGE_STEP, ATTR_PRESET_MODE, DOMAIN as FAN_DOMAIN, SERVICE_SET_PERCENTAGE, SERVICE_SET_PRESET_MODE, NotValidPresetModeError, ) -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.components.zha.core.discovery import GROUP_PROBE -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.components.zha.fan import ( - PRESET_MODE_AUTO, - PRESET_MODE_ON, - PRESET_MODE_SMART, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -34,25 +27,15 @@ from homeassistant.const import ( SERVICE_TURN_ON, STATE_OFF, STATE_ON, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_find_group_entity_id, - async_test_rejoin, - async_wait_for_updates, - find_entity_id, - send_attributes_report, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" +ON = 1 +OFF = 0 @pytest.fixture(autouse=True) @@ -75,122 +58,49 @@ def fan_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Fan zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [hvac.Fan.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00" - ) - - -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): +async def test_fan(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA fan platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], + 
SIG_EP_INPUT: [general.Basic.cluster_id, hvac.Fan.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def device_fan_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Groups.cluster_id, - general.OnOff.cluster_id, - hvac.Fan.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - }, - ieee=IEEE_GROUPABLE_DEVICE, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -@pytest.fixture -async def device_fan_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Groups.cluster_id, - general.OnOff.cluster_id, - hvac.Fan.cluster_id, - general.LevelControl.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -async def test_fan( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device -) -> None: - """Test ZHA fan platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device) - cluster = zigpy_device.endpoints.get(1).fan - entity_id = find_entity_id(Platform.FAN, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.FAN, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].fan assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at fan - await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) + await send_attributes_report( + hass, + cluster, + {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Low}, + ) assert hass.states.get(entity_id).state == STATE_ON # turn off at fan - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) + await send_attributes_report( + hass, cluster, {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Off} + ) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -230,11 +140,8 @@ async def test_fan( assert exc.value.translation_key == "not_valid_preset_mode" assert len(cluster.write_attributes.mock_calls) == 0 - # test adding new fan to the network and HA - 
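# A condensed sketch of the rewritten test_fan flow shown above, assuming the
# same fixtures (hass, setup_zha, zigpy_device_mock) and test helpers imported
# in this patch; the node descriptor and the HA service-call assertions are
# omitted for brevity. Note that PRESET_MODE_ON is now imported from
# zha.application.platforms.fan.const instead of homeassistant.components.zha.fan.
from zigpy.profiles import zha
from zigpy.zcl.clusters import general, hvac

from homeassistant.components.zha.helpers import (
    ZHADeviceProxy,
    ZHAGatewayProxy,
    get_zha_gateway,
    get_zha_gateway_proxy,
)
from homeassistant.const import STATE_OFF, STATE_ON, Platform
from homeassistant.core import HomeAssistant

from .common import find_entity_id, send_attributes_report
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE


async def test_fan_sketch(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None:
    """Condensed version of the new fan test setup and state handling."""
    await setup_zha()
    gateway = get_zha_gateway(hass)
    gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass)

    zigpy_device = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [general.Basic.cluster_id, hvac.Fan.cluster_id],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH,
                SIG_EP_PROFILE: zha.PROFILE_ID,
            }
        },
        ieee="01:2d:6f:00:0a:90:69:e8",
    )
    # Devices are registered on the gateway directly; no join/restore fixture.
    gateway.get_or_create_device(zigpy_device)
    await gateway.async_device_initialized(zigpy_device)
    await hass.async_block_till_done(wait_background_tasks=True)

    # Entity lookup goes through the device proxy instead of the old ZHADevice.
    zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(
        zigpy_device.ieee
    )
    entity_id = find_entity_id(Platform.FAN, zha_device_proxy, hass)
    cluster = zigpy_device.endpoints[1].fan

    # Attribute reports use named AttributeDefs instead of raw ids like {0: 1}.
    await send_attributes_report(
        hass, cluster, {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Low}
    )
    assert hass.states.get(entity_id).state == STATE_ON

    await send_attributes_report(
        hass, cluster, {hvac.Fan.AttributeDefs.fan_mode.id: hvac.FanMode.Off}
    )
    assert hass.states.get(entity_id).state == STATE_OFF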
await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) - -async def async_turn_on(hass, entity_id, percentage=None): +async def async_turn_on(hass: HomeAssistant, entity_id, percentage=None): """Turn fan on.""" data = { key: value @@ -245,14 +152,14 @@ async def async_turn_on(hass, entity_id, percentage=None): await hass.services.async_call(Platform.FAN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass, entity_id): +async def async_turn_off(hass: HomeAssistant, entity_id): """Turn fan off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(Platform.FAN, SERVICE_TURN_OFF, data, blocking=True) -async def async_set_percentage(hass, entity_id, percentage=None): +async def async_set_percentage(hass: HomeAssistant, entity_id, percentage=None): """Set percentage for specified fan.""" data = { key: value @@ -265,7 +172,7 @@ async def async_set_percentage(hass, entity_id, percentage=None): ) -async def async_set_preset_mode(hass, entity_id, preset_mode=None): +async def async_set_preset_mode(hass: HomeAssistant, entity_id, preset_mode=None): """Set preset_mode for specified fan.""" data = { key: value @@ -276,633 +183,3 @@ async def async_set_preset_mode(hass, entity_id, preset_mode=None): await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, data, blocking=True ) - - -@patch( - "zigpy.zcl.clusters.hvac.Fan.write_attributes", - new=AsyncMock(return_value=zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]), -) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_fan_entity( - hass: HomeAssistant, device_fan_1, device_fan_2, coordinator -) -> None: - """Test the fan entity for a ZHA group.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_fan_1._zha_gateway = zha_gateway - device_fan_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] - members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) - assert len(entity_domains) == 2 - - assert Platform.LIGHT in entity_domains - assert Platform.FAN in entity_domains - - entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) - assert hass.states.get(entity_id) is not None - - group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] - - dev1_fan_cluster = device_fan_1.device.endpoints[1].fan - dev2_fan_cluster = device_fan_2.device.endpoints[1].fan - - await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False) - await async_wait_for_updates(hass) - # test that the fans were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_fan_1, device_fan_2]) - await async_wait_for_updates(hass) - # test that the fan group entity was created and is off - assert 
hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - group_fan_cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - await hass.async_block_till_done() - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} - - # turn off from HA - group_fan_cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 0} - - # change speed from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 3} - - # change preset mode from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 4} - - # change preset mode from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5} - - # change preset mode from HA - group_fan_cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 6} - - # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_fan_cluster, {0: 0}) - await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) - await hass.async_block_till_done() - - # test that group fan is off - assert hass.states.get(entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev2_fan_cluster, {0: 2}) - await async_wait_for_updates(hass) - - # test that group fan is speed medium - assert hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_fan_cluster, {0: 0}) - await async_wait_for_updates(hass) - - # test that group fan is now off - assert hass.states.get(entity_id).state == STATE_OFF - - -@patch( - "zigpy.zcl.clusters.hvac.Fan.write_attributes", - new=AsyncMock(side_effect=ZigbeeException), -) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_fan_entity_failure_state( - hass: HomeAssistant, - device_fan_1, - device_fan_2, - coordinator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the fan entity for a ZHA group when writing attributes generates an exception.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_fan_1._zha_gateway = zha_gateway - device_fan_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee] - members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert 
zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group) - assert len(entity_domains) == 2 - - assert Platform.LIGHT in entity_domains - assert Platform.FAN in entity_domains - - entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group) - assert hass.states.get(entity_id) is not None - - group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id] - - await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False) - await async_wait_for_updates(hass) - # test that the fans were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_fan_1, device_fan_2]) - await async_wait_for_updates(hass) - # test that the fan group entity was created and is off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - group_fan_cluster.write_attributes.reset_mock() - - with pytest.raises(HomeAssistantError): - await async_turn_on(hass, entity_id) - - await hass.async_block_till_done() - assert len(group_fan_cluster.write_attributes.mock_calls) == 1 - assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2} - - -@pytest.mark.parametrize( - ("plug_read", "expected_state", "expected_percentage"), - [ - (None, STATE_OFF, None), - ({"fan_mode": 0}, STATE_OFF, 0), - ({"fan_mode": 1}, STATE_ON, 33), - ({"fan_mode": 2}, STATE_ON, 66), - ({"fan_mode": 3}, STATE_ON, 100), - ], -) -async def test_fan_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, - plug_read, - expected_state, - expected_percentage, -) -> None: - """Test ZHA fan platform.""" - - cluster = zigpy_device.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = plug_read - - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == expected_state - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - - -async def test_fan_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, -) -> None: - """Test ZHA fan platform.""" - - cluster = zigpy_device.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 2 - else: - assert cluster.read_attributes.await_count == 4 - - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == 
"zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 5 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 33 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 6 - - -@pytest.fixture -def zigpy_device_ikea(zigpy_device_mock): - """Ikea fan zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - 64637, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - } - return zigpy_device_mock( - endpoints, - manufacturer="IKEA of Sweden", - model="STARKVIND Air purifier", - quirk=zhaquirks.ikea.starkvind.IkeaSTARKVIND, - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - -async def test_fan_ikea( - hass: HomeAssistant, - zha_device_joined_restored: ZHADevice, - zigpy_device_ikea: Device, -) -> None: - """Test ZHA fan Ikea platform.""" - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at fan - await send_attributes_report(hass, cluster, {6: 1}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at fan - await send_attributes_report(hass, cluster, {6: 0}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 1}, manufacturer=None) - ] - - # turn off from HA - cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 0}, manufacturer=None) - ] - - # change speed from HA - cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 10}, manufacturer=None) - ] - - # change preset_mode from HA - cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 1}, manufacturer=None) - ] - - # set invalid preset_mode from HA - cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError) as exc: - await async_set_preset_mode( - hass, entity_id, preset_mode="invalid does not 
exist" - ) - assert exc.value.translation_key == "not_valid_preset_mode" - assert len(cluster.write_attributes.mock_calls) == 0 - - # test adding new fan to the network and HA - await async_test_rejoin(hass, zigpy_device_ikea, [cluster], (9,)) - - -@pytest.mark.parametrize( - ( - "ikea_plug_read", - "ikea_expected_state", - "ikea_expected_percentage", - "ikea_preset_mode", - ), - [ - (None, STATE_OFF, None, None), - ({"fan_mode": 0}, STATE_OFF, 0, None), - ({"fan_mode": 1}, STATE_ON, 10, PRESET_MODE_AUTO), - ({"fan_mode": 10}, STATE_ON, 20, "Speed 1"), - ({"fan_mode": 15}, STATE_ON, 30, "Speed 1.5"), - ({"fan_mode": 20}, STATE_ON, 40, "Speed 2"), - ({"fan_mode": 25}, STATE_ON, 50, "Speed 2.5"), - ({"fan_mode": 30}, STATE_ON, 60, "Speed 3"), - ({"fan_mode": 35}, STATE_ON, 70, "Speed 3.5"), - ({"fan_mode": 40}, STATE_ON, 80, "Speed 4"), - ({"fan_mode": 45}, STATE_ON, 90, "Speed 4.5"), - ({"fan_mode": 50}, STATE_ON, 100, "Speed 5"), - ], -) -async def test_fan_ikea_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_ikea, - ikea_plug_read, - ikea_expected_state, - ikea_expected_percentage, - ikea_preset_mode, -) -> None: - """Test ZHA fan platform.""" - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - cluster.PLUGGED_ATTR_READS = ikea_plug_read - - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == ikea_expected_state - assert ( - hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] - == ikea_expected_percentage - ) - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == ikea_preset_mode - - -async def test_fan_ikea_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_ikea, -) -> None: - """Test ZHA fan platform.""" - cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device_ikea) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 6 - - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 7 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 10 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is PRESET_MODE_AUTO - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10 - if zha_device_joined_restored.name == "zha_device_joined": - assert 
cluster.read_attributes.await_count == 5 - else: - assert cluster.read_attributes.await_count == 8 - - -@pytest.fixture -def zigpy_device_kof(zigpy_device_mock): - """Fan by King of Fans zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.Groups.cluster_id, - general.Scenes.cluster_id, - 64637, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE, - SIG_EP_PROFILE: zha.PROFILE_ID, - }, - } - return zigpy_device_mock( - endpoints, - manufacturer="King Of Fans, Inc.", - model="HBUniversalCFRemote", - quirk=zhaquirks.kof.kof_mr101z.CeilingFan, - node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - ) - - -async def test_fan_kof( - hass: HomeAssistant, - zha_device_joined_restored: ZHADevice, - zigpy_device_kof: Device, -) -> None: - """Test ZHA fan platform for King of Fans.""" - zha_device = await zha_device_joined_restored(zigpy_device_kof) - cluster = zigpy_device_kof.endpoints.get(1).fan - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the fan was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at fan - await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at fan - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - cluster.write_attributes.reset_mock() - await async_turn_on(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 2}, manufacturer=None) - ] - - # turn off from HA - cluster.write_attributes.reset_mock() - await async_turn_off(hass, entity_id) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 0}, manufacturer=None) - ] - - # change speed from HA - cluster.write_attributes.reset_mock() - await async_set_percentage(hass, entity_id, percentage=100) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 4}, manufacturer=None) - ] - - # change preset_mode from HA - cluster.write_attributes.reset_mock() - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART) - assert cluster.write_attributes.mock_calls == [ - call({"fan_mode": 6}, manufacturer=None) - ] - - # set invalid preset_mode from HA - cluster.write_attributes.reset_mock() - with pytest.raises(NotValidPresetModeError) as exc: - await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO) - assert exc.value.translation_key == "not_valid_preset_mode" - assert len(cluster.write_attributes.mock_calls) == 0 - - # test adding new fan to the network and HA - await async_test_rejoin(hass, zigpy_device_kof, [cluster], (1,)) - - -@pytest.mark.parametrize( - ("plug_read", "expected_state", "expected_percentage", "expected_preset"), - [ - (None, STATE_OFF, None, None), - ({"fan_mode": 0}, STATE_OFF, 0, None), - ({"fan_mode": 1}, STATE_ON, 25, None), - ({"fan_mode": 2}, STATE_ON, 50, None), - ({"fan_mode": 3}, STATE_ON, 75, None), - ({"fan_mode": 4}, STATE_ON, 100, None), - ({"fan_mode": 6}, STATE_ON, None, 
PRESET_MODE_SMART), - ], -) -async def test_fan_kof_init( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_kof, - plug_read, - expected_state, - expected_percentage, - expected_preset, -) -> None: - """Test ZHA fan platform for King of Fans.""" - - cluster = zigpy_device_kof.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = plug_read - - zha_device = await zha_device_joined_restored(zigpy_device_kof) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == expected_state - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] == expected_preset - - -async def test_fan_kof_update_entity( - hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device_kof, -) -> None: - """Test ZHA fan platform for King of Fans.""" - - cluster = zigpy_device_kof.endpoints.get(1).fan - cluster.PLUGGED_ATTR_READS = {"fan_mode": 0} - - zha_device = await zha_device_joined_restored(zigpy_device_kof) - entity_id = find_entity_id(Platform.FAN, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 2 - else: - assert cluster.read_attributes.await_count == 4 - - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_OFF - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 3 - else: - assert cluster.read_attributes.await_count == 5 - - cluster.PLUGGED_ATTR_READS = {"fan_mode": 1} - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - assert hass.states.get(entity_id).state == STATE_ON - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 25 - assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None - assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4 - if zha_device_joined_restored.name == "zha_device_joined": - assert cluster.read_attributes.await_count == 4 - else: - assert cluster.read_attributes.await_count == 6 diff --git a/tests/components/zha/test_gateway.py b/tests/components/zha/test_gateway.py deleted file mode 100644 index 3a576ed6e55..00000000000 --- a/tests/components/zha/test_gateway.py +++ /dev/null @@ -1,404 +0,0 @@ -"""Test ZHA Gateway.""" - -import asyncio -from unittest.mock import MagicMock, PropertyMock, patch - -import pytest -from zigpy.application import ControllerApplication -from zigpy.profiles import zha -import zigpy.types -from zigpy.zcl.clusters import general, lighting -import zigpy.zdo.types - -from homeassistant.components.zha.core.gateway import ZHAGateway -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant - -from .common import async_find_group_entity_id -from .conftest 
import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - -from tests.common import MockConfigEntry - -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" - - -@pytest.fixture -def zigpy_dev_basic(zigpy_device_mock): - """Zigpy device with just a basic cluster.""" - return zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - - -@pytest.fixture(autouse=True) -def required_platform_only(): - """Only set up the required and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.SENSOR, - Platform.LIGHT, - Platform.DEVICE_TRACKER, - Platform.NUMBER, - Platform.SELECT, - ), - ): - yield - - -@pytest.fixture -async def zha_dev_basic(hass, zha_device_restored, zigpy_dev_basic): - """ZHA device with just a basic cluster.""" - - return await zha_device_restored(zigpy_dev_basic) - - -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_device_left(hass: HomeAssistant, zigpy_dev_basic, zha_dev_basic) -> None: - """Device leaving the network should become unavailable.""" - - assert zha_dev_basic.available is True - - get_zha_gateway(hass).device_left(zigpy_dev_basic) - await hass.async_block_till_done() - assert zha_dev_basic.available is False - - -async def test_gateway_group_methods( - hass: HomeAssistant, device_light_1, device_light_2, coordinator -) -> None: - """Test creating a group with 2 members.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = 
[GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - - entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(entity_id) is not None - - # test get group by name - assert zha_group == zha_gateway.async_get_group_by_name(zha_group.name) - - # test removing a group - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - await hass.async_block_till_done() - - # we shouldn't have the group anymore - assert zha_gateway.async_get_group_by_name(zha_group.name) is None - - # the group entity should be cleaned up - assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) - - # test creating a group with 1 member - zha_group = await zha_gateway.async_create_zigpy_group( - "Test Group", [GroupMember(device_light_1.ieee, 1)] - ) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 1 - for member in zha_group.members: - assert member.device.ieee in [device_light_1.ieee] - - # the group entity should not have been cleaned up - assert entity_id not in hass.states.async_entity_ids(Platform.LIGHT) - - with patch("zigpy.zcl.Cluster.request", side_effect=TimeoutError): - await zha_group.members[0].async_remove_from_group() - assert len(zha_group.members) == 1 - for member in zha_group.members: - assert member.device.ieee in [device_light_1.ieee] - - -async def test_gateway_create_group_with_id( - hass: HomeAssistant, device_light_1, coordinator -) -> None: - """Test creating a group with a specific ID.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - - zha_group = await zha_gateway.async_create_zigpy_group( - "Test Group", [GroupMember(device_light_1.ieee, 1)], group_id=0x1234 - ) - await hass.async_block_till_done() - - assert len(zha_group.members) == 1 - assert zha_group.members[0].device is device_light_1 - assert zha_group.group_id == 0x1234 - - -@patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_devices", - MagicMock(), -) -@patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_load_groups", - MagicMock(), -) -@pytest.mark.parametrize( - ("device_path", "thread_state", "config_override"), - [ - ("/dev/ttyUSB0", True, {}), - ("socket://192.168.1.123:9999", False, {}), - ("socket://192.168.1.123:9999", True, {"use_thread": True}), - ], -) -async def test_gateway_initialize_bellows_thread( - device_path: str, - thread_state: bool, - config_override: dict, - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, -) -> None: - """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - data = dict(config_entry.data) - data["device"]["path"] = device_path - config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(config_entry, data=data) - - zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ) as mock_new: - await 
zha_gateway.async_initialize() - - assert mock_new.mock_calls[-1].kwargs["config"]["use_thread"] is thread_state - - await zha_gateway.shutdown() - - -@pytest.mark.parametrize( - ("device_path", "config_override", "expected_channel"), - [ - ("/dev/ttyUSB0", {}, None), - ("socket://192.168.1.123:9999", {}, None), - ("socket://192.168.1.123:9999", {"network": {"channel": 20}}, 20), - ("socket://core-silabs-multiprotocol:9999", {}, 15), - ("socket://core-silabs-multiprotocol:9999", {"network": {"channel": 20}}, 20), - ], -) -async def test_gateway_force_multi_pan_channel( - device_path: str, - config_override: dict, - expected_channel: int | None, - hass: HomeAssistant, - config_entry: MockConfigEntry, -) -> None: - """Test ZHA disabling the UART thread when connecting to a TCP coordinator.""" - data = dict(config_entry.data) - data["device"]["path"] = device_path - config_entry.add_to_hass(hass) - hass.config_entries.async_update_entry(config_entry, data=data) - - zha_gateway = ZHAGateway(hass, {"zigpy_config": config_override}, config_entry) - - _, config = zha_gateway.get_application_controller_data() - assert config["network"]["channel"] == expected_channel - - -async def test_single_reload_on_multiple_connection_loss( - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, -) -> None: - """Test that we only reload once when we lose the connection multiple times.""" - config_entry.add_to_hass(hass) - - zha_gateway = ZHAGateway(hass, {}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ): - await zha_gateway.async_initialize() - - with patch.object( - hass.config_entries, "async_reload", wraps=hass.config_entries.async_reload - ) as mock_reload: - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - zha_gateway.connection_lost(RuntimeError()) - - assert len(mock_reload.mock_calls) == 1 - - await hass.async_block_till_done() - - -@pytest.mark.parametrize("radio_concurrency", [1, 2, 8]) -async def test_startup_concurrency_limit( - radio_concurrency: int, - hass: HomeAssistant, - zigpy_app_controller: ControllerApplication, - config_entry: MockConfigEntry, - zigpy_device_mock, -) -> None: - """Test ZHA gateway limits concurrency on startup.""" - config_entry.add_to_hass(hass) - zha_gateway = ZHAGateway(hass, {}, config_entry) - - with patch( - "bellows.zigbee.application.ControllerApplication.new", - return_value=zigpy_app_controller, - ): - await zha_gateway.async_initialize() - - for i in range(50): - zigpy_dev = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=f"11:22:33:44:{i:08x}", - nwk=0x1234 + i, - ) - zigpy_dev.node_desc.mac_capability_flags |= ( - zigpy.zdo.types.NodeDescriptor.MACCapabilityFlags.MainsPowered - ) - - zha_gateway._async_get_or_create_device(zigpy_dev) - - # Keep track of request concurrency during initialization - current_concurrency = 0 - concurrencies = [] - - async def mock_send_packet(*args, **kwargs): - nonlocal current_concurrency - - current_concurrency += 1 - concurrencies.append(current_concurrency) - - await asyncio.sleep(0.001) - - current_concurrency -= 1 - 
concurrencies.append(current_concurrency) - - type(zha_gateway).radio_concurrency = PropertyMock(return_value=radio_concurrency) - assert zha_gateway.radio_concurrency == radio_concurrency - - with patch( - "homeassistant.components.zha.core.device.ZHADevice.async_initialize", - side_effect=mock_send_packet, - ): - await zha_gateway.async_fetch_updated_state_mains() - - await zha_gateway.shutdown() - - # Make sure concurrency was always limited - assert current_concurrency == 0 - assert min(concurrencies) == 0 - - if radio_concurrency > 1: - assert 1 <= max(concurrencies) < zha_gateway.radio_concurrency - else: - assert 1 == max(concurrencies) == zha_gateway.radio_concurrency diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index 0615fefd644..13c03c17cf7 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -1,81 +1,27 @@ """Tests for ZHA helpers.""" -import enum import logging -from unittest.mock import patch +from typing import Any import pytest import voluptuous_serialize -from zigpy.profiles import zha -from zigpy.quirks.v2.homeassistant import UnitOfPower as QuirksUnitOfPower from zigpy.types.basic import uint16_t -from zigpy.zcl.clusters import general, lighting +from zigpy.zcl.clusters import lighting -from homeassistant.components.zha.core.helpers import ( +from homeassistant.components.zha.helpers import ( cluster_command_schema_to_vol_schema, convert_to_zcl_values, - validate_unit, + exclude_none_values, ) -from homeassistant.const import Platform, UnitOfPower from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv -from .common import async_enable_traffic -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE - _LOGGER = logging.getLogger(__name__) -@pytest.fixture(autouse=True) -def light_platform_only(): - """Only set up the light and required base platforms to speed up tests.""" - with patch( - "homeassistant.components.zha.PLATFORMS", - ( - Platform.BUTTON, - Platform.LIGHT, - Platform.NUMBER, - Platform.SELECT, - ), - ): - yield - - -@pytest.fixture -async def device_light(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): - """Test light.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - } - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return color_cluster, zha_device - - -async def test_zcl_schema_conversions(hass: HomeAssistant, device_light) -> None: +async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: """Test ZHA ZCL schema conversion helpers.""" - color_cluster, zha_device = device_light - await async_enable_traffic(hass, [zha_device]) - command_schema = color_cluster.commands_by_name["color_loop_set"].schema + command_schema = lighting.Color.ServerCommandDefs.color_loop_set.schema expected_schema = [ { "type": "multi_select", @@ -215,23 +161,21 @@ async def test_zcl_schema_conversions(hass: HomeAssistant, device_light) -> None assert 
converted_data["update_flags"] == 0 -def test_unit_validation() -> None: - """Test unit validation.""" +@pytest.mark.parametrize( + ("obj", "expected_output"), + [ + ({"a": 1, "b": 2, "c": None}, {"a": 1, "b": 2}), + ({"a": 1, "b": 2, "c": 0}, {"a": 1, "b": 2, "c": 0}), + ({"a": 1, "b": 2, "c": ""}, {"a": 1, "b": 2, "c": ""}), + ({"a": 1, "b": 2, "c": False}, {"a": 1, "b": 2, "c": False}), + ], +) +def test_exclude_none_values( + obj: dict[str, Any], expected_output: dict[str, Any] +) -> None: + """Test exclude_none_values helper.""" + result = exclude_none_values(obj) + assert result == expected_output - assert validate_unit(QuirksUnitOfPower.WATT) == UnitOfPower.WATT - - class FooUnit(enum.Enum): - """Foo unit.""" - - BAR = "bar" - - class UnitOfMass(enum.Enum): - """UnitOfMass.""" - - BAR = "bar" - - with pytest.raises(KeyError): - validate_unit(FooUnit.BAR) - - with pytest.raises(ValueError): - validate_unit(UnitOfMass.BAR) + for key in expected_output: + assert expected_output[key] == obj[key] diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index 4d4956d3978..aa68d688799 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -9,14 +9,14 @@ from zigpy.application import ControllerApplication from zigpy.config import CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import TransientConnectionError -from homeassistant.components.zha.core.const import ( +from homeassistant.components.zha.const import ( CONF_BAUDRATE, CONF_FLOW_CONTROL, CONF_RADIO_TYPE, CONF_USB_PATH, DOMAIN, ) -from homeassistant.components.zha.core.helpers import get_zha_data +from homeassistant.components.zha.helpers import get_zha_data from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, MAJOR_VERSION, @@ -43,7 +43,7 @@ def disable_platform_only(): @pytest.fixture -def config_entry_v1(hass): +def config_entry_v1(hass: HomeAssistant): """Config entry version 1 fixture.""" return MockConfigEntry( domain=DOMAIN, @@ -139,7 +139,6 @@ async def test_config_depreciation(hass: HomeAssistant, zha_config) -> None: ("socket://[1.2.3.4]:5678 ", "socket://1.2.3.4:5678"), ], ) -@patch("homeassistant.components.zha.setup_quirks", Mock(return_value=True)) @patch( "homeassistant.components.zha.websocket_api.async_load_api", Mock(return_value=True) ) @@ -282,7 +281,7 @@ async def test_shutdown_on_ha_stop( zha_data = get_zha_data(hass) with patch.object( - zha_data.gateway, "shutdown", wraps=zha_data.gateway.shutdown + zha_data.gateway_proxy, "shutdown", wraps=zha_data.gateway_proxy.shutdown ) as mock_shutdown: hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) hass.set_state(CoreState.stopping) diff --git a/tests/components/zha/test_light.py b/tests/components/zha/test_light.py index a9d32362863..ef2714b3b58 100644 --- a/tests/components/zha/test_light.py +++ b/tests/components/zha/test_light.py @@ -1,12 +1,11 @@ """Test ZHA light.""" -from collections.abc import Callable -from datetime import timedelta -from typing import Any from unittest.mock import AsyncMock, call, patch, sentinel import pytest +from zha.application.platforms.light.const import FLASH_EFFECTS from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import general, lighting import zigpy.zcl.foundation as zcl_f @@ -16,41 +15,23 @@ from homeassistant.components.light import ( FLASH_SHORT, ColorMode, ) -from homeassistant.components.zha.core.const import ( - CONF_ALWAYS_PREFER_XY_COLOR_MODE, - CONF_GROUP_MEMBERS_ASSUME_STATE, - ZHA_OPTIONS, +from 
homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.components.zha.light import FLASH_EFFECTS -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import ( - async_enable_traffic, - async_find_group_entity_id, async_shift_time, - async_test_rejoin, - async_wait_for_updates, find_entity_id, - patch_zha_config, send_attributes_report, update_attribute_cache, ) from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import ( - async_fire_time_changed, - async_mock_load_restore_state_from_storage, -) - -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9" -IEEE_GROUPABLE_DEVICE3 = "03:2d:6f:00:0a:90:69:e7" - LIGHT_ON_OFF = { 1: { SIG_EP_PROFILE: zha.PROFILE_ID, @@ -111,195 +92,6 @@ def light_platform_only(): yield -@pytest.fixture -async def coordinator(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_1(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE, - nwk=0xB79D, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def device_light_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - manufacturer="sengled", - nwk=0xC79E, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return 
zha_device - - -@pytest.fixture -async def device_light_3(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee=IEEE_GROUPABLE_DEVICE3, - nwk=0xB89F, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async def eWeLink_light(hass, zigpy_device_mock, zha_device_joined): - """Mock eWeLink light.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - general.Groups.cluster_id, - general.Identify.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_PROFILE: zha.PROFILE_ID, - } - }, - ieee="03:2d:6f:00:0a:90:69:e3", - manufacturer="eWeLink", - nwk=0xB79D, - ) - color_cluster = zigpy_device.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature - | lighting.Color.ColorCapabilities.XY_attributes, - "color_temp_physical_min": 0, - "color_temp_physical_max": 0, - } - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_light_refresh( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -) -> None: - """Test ZHA light platform refresh.""" - - # create zigpy devices - zigpy_device = zigpy_device_mock(LIGHT_ON_OFF) - on_off_cluster = zigpy_device.endpoints[1].on_off - on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - on_off_cluster.read_attributes.reset_mock() - - # not enough time passed - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 0 - assert on_off_cluster.read_attributes.await_count == 0 - assert hass.states.get(entity_id).state == STATE_OFF - - # 1 interval - 1 call - on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 1} - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 1 - assert on_off_cluster.read_attributes.await_count == 1 - assert hass.states.get(entity_id).state == STATE_ON - - # 2 intervals - 2 calls - on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0} - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80)) - await hass.async_block_till_done() - assert on_off_cluster.read_attributes.call_count == 2 - assert on_off_cluster.read_attributes.await_count == 2 - assert hass.states.get(entity_id).state == STATE_OFF - - @patch( "zigpy.zcl.clusters.lighting.Color.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), @@ -322,34 +114,42 @@ async def test_light_refresh( ) async def test_light( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, device, reporting, ) -> None: """Test ZHA light platform.""" - # create 
zigpy devices - zigpy_device = zigpy_device_mock(device) - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock(device) + cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) + + if cluster_color: + cluster_color.PLUGGED_ATTR_READS = { + "color_temperature": 100, + "color_temp_physical_min": 0, + "color_temp_physical_max": 600, + "color_capabilities": lighting.ColorCapabilities.XY_attributes + | lighting.ColorCapabilities.Color_temperature, + } + update_attribute_cache(cluster_color) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) assert entity_id is not None cluster_on_off = zigpy_device.endpoints[1].on_off cluster_level = getattr(zigpy_device.endpoints[1], "level", None) - cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None) cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None) - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the lights were created and that they are unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the lights were created and are off assert hass.states.get(entity_id).state == STATE_OFF # test turning the lights on and off from the light @@ -379,889 +179,6 @@ async def test_light( hass, cluster_level, entity_id, 150, STATE_ON ) - # test rejoin - await async_test_off_from_hass(hass, cluster_on_off, entity_id) - clusters = [c for c in (cluster_on_off, cluster_level, cluster_color) if c] - await async_test_rejoin(hass, zigpy_device, clusters, reporting) - - -@pytest.mark.parametrize( - ("plugged_attr_reads", "config_override", "expected_state"), - [ - # HS light without cached hue or saturation - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with cached hue - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_hue": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with cached saturation - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_saturation": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - # HS light with both - ( - { - "color_capabilities": ( - lighting.Color.ColorCapabilities.Hue_and_saturation - ), - "current_hue": 100, - "current_saturation": 100, - }, - {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False}, - {}, - ), - ], -) -async def test_light_initialization( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined_restored, - plugged_attr_reads, - config_override, - expected_state, -) -> None: - """Test ZHA light initialization with cached attributes and color modes.""" - - # create zigpy devices - zigpy_device = zigpy_device_mock(LIGHT_COLOR) - - # mock attribute 
reads - zigpy_device.endpoints[1].light_color.PLUGGED_ATTR_READS = plugged_attr_reads - - with patch_zha_config("light", config_override): - zha_device = await zha_device_joined_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - - assert entity_id is not None - - # pylint: disable-next=fixme - # TODO ensure hue and saturation are properly set on startup - - -@patch( - "zigpy.zcl.clusters.lighting.Color.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "zigpy.zcl.clusters.general.Identify.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "zigpy.zcl.clusters.general.LevelControl.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "zigpy.zcl.clusters.general.OnOff.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -async def test_transitions( - hass: HomeAssistant, device_light_1, device_light_2, eWeLink_light, coordinator -) -> None: - """Test ZHA light transition code.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - eWeLink_light_entity_id = find_entity_id(Platform.LIGHT, eWeLink_light, hass) - assert device_1_entity_id != device_2_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - - dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off - eWeLink_cluster_on_off = eWeLink_light.device.endpoints[1].on_off - - dev1_cluster_level = device_light_1.device.endpoints[1].level - dev2_cluster_level = device_light_2.device.endpoints[1].level - eWeLink_cluster_level = eWeLink_light.device.endpoints[1].level - - dev1_cluster_color = device_light_1.device.endpoints[1].light_color - dev2_cluster_color = device_light_2.device.endpoints[1].light_color - eWeLink_cluster_color = eWeLink_light.device.endpoints[1].light_color - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_light_1, device_light_2]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_OFF - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == 
STATE_OFF - - # first test 0 length transition with no color and no brightness provided - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_1_entity_id, "transition": 0}, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=254, # default "full on" brightness - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 254 - - # test 0 length transition with no color and no brightness provided again, but for "force on" lights - eWeLink_cluster_on_off.request.reset_mock() - eWeLink_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": eWeLink_light_entity_id, "transition": 0}, - blocking=True, - ) - assert eWeLink_cluster_on_off.request.call_count == 1 - assert eWeLink_cluster_on_off.request.await_count == 1 - assert eWeLink_cluster_on_off.request.call_args_list[0] == call( - False, - eWeLink_cluster_on_off.commands_by_name["on"].id, - eWeLink_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert eWeLink_cluster_color.request.call_count == 0 - assert eWeLink_cluster_color.request.await_count == 0 - assert eWeLink_cluster_level.request.call_count == 1 - assert eWeLink_cluster_level.request.await_count == 1 - assert eWeLink_cluster_level.request.call_args == call( - False, - eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - eWeLink_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=254, # default "full on" brightness - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - eWeLink_state = hass.states.get(eWeLink_light_entity_id) - assert eWeLink_state.state == STATE_ON - assert eWeLink_state.attributes["brightness"] == 254 - - eWeLink_cluster_on_off.request.reset_mock() - eWeLink_cluster_level.request.reset_mock() - - # test 0 length transition with brightness, but no color provided - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_level.request.reset_mock() - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_1_entity_id, "transition": 0, "brightness": 50}, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=50, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - 
light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 50 - - dev1_cluster_level.request.reset_mock() - - # test non 0 length transition with color provided while light is on - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "transition": 3.5, - "brightness": 18, - "color_temp": 432, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=18, - transition_time=35, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=432, - transition_time=35, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 18 - assert light1_state.attributes["color_temp"] == 432 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # test 0 length transition to turn light off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - "transition": 0, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 1 - assert dev1_cluster_level.request.await_count == 1 - assert dev1_cluster_level.request.call_args == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=0, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_OFF - - dev1_cluster_level.request.reset_mock() - - # test non 0 length transition and color temp while turning light on (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 2 - assert dev1_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev1_cluster_level.request.call_args_list[0] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - 
dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_level.request.call_args_list[1] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level"].id, - dev1_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 235 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # turn light 1 back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test no transition provided and color temp while turning light on (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "brightness": 25, - "color_temp": 236, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 0 - assert dev1_cluster_on_off.request.await_count == 0 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 2 - assert dev1_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev1_cluster_level.request.call_args_list[0] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=236, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_level.request.call_args_list[1] == call( - False, - dev1_cluster_level.commands_by_name["move_to_level"].id, - dev1_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert 
light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 236 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_level.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # turn light 1 back off to setup group test - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test no transition when the same color temp is provided from off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_1_entity_id, - "color_temp": 236, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 1 - assert dev1_cluster_color.request.await_count == 1 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - - assert dev1_cluster_on_off.request.call_args == call( - False, - dev1_cluster_on_off.commands_by_name["on"].id, - dev1_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=236, - transition_time=0, # no transition when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light1_state = hass.states.get(device_1_entity_id) - assert light1_state.state == STATE_ON - assert light1_state.attributes["brightness"] == 25 - assert light1_state.attributes["color_temp"] == 236 - assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - - # turn light 1 back off to setup group test - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_1_entity_id, - }, - blocking=True, - ) - assert dev1_cluster_on_off.request.call_count == 1 - assert dev1_cluster_on_off.request.await_count == 1 - assert dev1_cluster_color.request.call_count == 0 - assert dev1_cluster_color.request.await_count == 0 - assert dev1_cluster_level.request.call_count == 0 - assert dev1_cluster_level.request.await_count == 0 - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - dev1_cluster_on_off.request.reset_mock() - dev1_cluster_color.request.reset_mock() - dev1_cluster_level.request.reset_mock() - - # test sengled light uses default minimum transition time - dev2_cluster_on_off.request.reset_mock() - dev2_cluster_color.request.reset_mock() - dev2_cluster_level.request.reset_mock() - - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_2_entity_id, "transition": 0, "brightness": 100}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 
0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=100, - transition_time=1, # transition time - sengled light uses default minimum - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - assert light2_state.attributes["brightness"] == 100 - - dev2_cluster_level.request.reset_mock() - - # turn the sengled light back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_on_off.request.reset_mock() - - # test non 0 length transition and color temp while turning light on and sengled (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_2_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 1 - assert dev2_cluster_color.request.await_count == 1 - assert dev2_cluster_level.request.call_count == 2 - assert dev2_cluster_level.request.await_count == 2 - - # first it comes on with no transition at 2 brightness - assert dev2_cluster_level.request.call_args_list[0] == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=2, - transition_time=1, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev2_cluster_color.request.call_args == call( - False, - dev2_cluster_color.commands_by_name["move_to_color_temp"].id, - dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=1, # sengled transition == 1 when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev2_cluster_level.request.call_args_list[1] == call( - False, - dev2_cluster_level.commands_by_name["move_to_level"].id, - dev2_cluster_level.commands_by_name["move_to_level"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - assert light2_state.attributes["brightness"] == 25 - assert light2_state.attributes["color_temp"] == 235 - assert light2_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - dev2_cluster_level.request.reset_mock() - dev2_cluster_color.request.reset_mock() - - # turn the sengled light back off - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - { - "entity_id": 
device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_on_off.request.reset_mock() - - # test non 0 length transition and color temp while turning group light on (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": group_entity_id, - "transition": 1, - "brightness": 25, - "color_temp": 235, - }, - blocking=True, - ) - - group_on_off_cluster_handler = zha_group.endpoint[general.OnOff.cluster_id] - group_level_cluster_handler = zha_group.endpoint[general.LevelControl.cluster_id] - group_color_cluster_handler = zha_group.endpoint[lighting.Color.cluster_id] - assert group_on_off_cluster_handler.request.call_count == 0 - assert group_on_off_cluster_handler.request.await_count == 0 - assert group_color_cluster_handler.request.call_count == 1 - assert group_color_cluster_handler.request.await_count == 1 - assert group_level_cluster_handler.request.call_count == 1 - assert group_level_cluster_handler.request.await_count == 1 - - # groups are omitted from the 3 call dance for new_color_provided_while_off - assert group_color_cluster_handler.request.call_args == call( - False, - dev2_cluster_color.commands_by_name["move_to_color_temp"].id, - dev2_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=10, # sengled transition == 1 when new_color_provided_while_off - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert group_level_cluster_handler.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=25, - transition_time=10, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_ON - assert group_state.attributes["brightness"] == 25 - assert group_state.attributes["color_temp"] == 235 - assert group_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - - group_on_off_cluster_handler.request.reset_mock() - group_color_cluster_handler.request.reset_mock() - group_level_cluster_handler.request.reset_mock() - - # turn the sengled light back on - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": device_2_entity_id, - }, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 1 - assert dev2_cluster_on_off.request.await_count == 1 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 0 - assert dev2_cluster_level.request.await_count == 0 - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - - dev2_cluster_on_off.request.reset_mock() - - # turn the light off with a transition - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_off", - {"entity_id": device_2_entity_id, "transition": 2}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - 
assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=0, - transition_time=20, # transition time - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_OFF - - dev2_cluster_level.request.reset_mock() - - # turn the light back on with no args should use a transition and last known brightness - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - {"entity_id": device_2_entity_id}, - blocking=True, - ) - assert dev2_cluster_on_off.request.call_count == 0 - assert dev2_cluster_on_off.request.await_count == 0 - assert dev2_cluster_color.request.call_count == 0 - assert dev2_cluster_color.request.await_count == 0 - assert dev2_cluster_level.request.call_count == 1 - assert dev2_cluster_level.request.await_count == 1 - assert dev2_cluster_level.request.call_args == call( - False, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id, - dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema, - level=25, - transition_time=1, # transition time - sengled light uses default minimum - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - light2_state = hass.states.get(device_2_entity_id) - assert light2_state.state == STATE_ON - - dev2_cluster_level.request.reset_mock() - - # test eWeLink color temp while turning light on from off (new_color_provided_while_off) - await hass.services.async_call( - LIGHT_DOMAIN, - "turn_on", - { - "entity_id": eWeLink_light_entity_id, - "color_temp": 235, - }, - blocking=True, - ) - assert eWeLink_cluster_on_off.request.call_count == 1 - assert eWeLink_cluster_on_off.request.await_count == 1 - assert eWeLink_cluster_color.request.call_count == 1 - assert eWeLink_cluster_color.request.await_count == 1 - assert eWeLink_cluster_level.request.call_count == 0 - assert eWeLink_cluster_level.request.await_count == 0 - - # first it comes on - assert eWeLink_cluster_on_off.request.call_args_list[0] == call( - False, - eWeLink_cluster_on_off.commands_by_name["on"].id, - eWeLink_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert dev1_cluster_color.request.call_args == call( - False, - dev1_cluster_color.commands_by_name["move_to_color_temp"].id, - dev1_cluster_color.commands_by_name["move_to_color_temp"].schema, - color_temp_mireds=235, - transition_time=0, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - - eWeLink_state = hass.states.get(eWeLink_light_entity_id) - assert eWeLink_state.state == STATE_ON - assert eWeLink_state.attributes["color_temp"] == 235 - assert eWeLink_state.attributes["color_mode"] == ColorMode.COLOR_TEMP - assert eWeLink_state.attributes["min_mireds"] == 153 - assert eWeLink_state.attributes["max_mireds"] == 500 - @patch( "zigpy.zcl.clusters.lighting.Color.request", @@ -1275,13 +192,51 @@ async def test_transitions( "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) -async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None: +async def test_on_with_off_color( + hass: HomeAssistant, setup_zha, zigpy_device_mock +) -> None: """Test turning on the light 
and sending color commands before on/level commands for supporting lights.""" - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off - dev1_cluster_level = device_light_1.device.endpoints[1].level - dev1_cluster_color = device_light_1.device.endpoints[1].light_color + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [ + general.OnOff.cluster_id, + general.LevelControl.cluster_id, + lighting.Color.cluster_id, + general.Groups.cluster_id, + general.Identify.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + nwk=0xB79D, + ) + + dev1_cluster_color = zigpy_device.endpoints[1].light_color + + dev1_cluster_color.PLUGGED_ATTR_READS = { + "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature + | lighting.Color.ColorCapabilities.XY_attributes + } + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) + assert entity_id is not None + + device_1_entity_id = find_entity_id(Platform.LIGHT, zha_device_proxy, hass) + dev1_cluster_on_off = zigpy_device.endpoints[1].on_off + dev1_cluster_level = zigpy_device.endpoints[1].level # Execute_if_off will override the "enhanced turn on from an off-state" config option that's enabled here dev1_cluster_color.PLUGGED_ATTR_READS = { @@ -1403,28 +358,34 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None: assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP -async def async_test_on_off_from_light(hass, cluster, entity_id): +async def async_test_on_off_from_light( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from the light.""" # turn on at light await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 3}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_ON # turn off at light await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 3}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_OFF -async def async_test_on_from_light(hass, cluster, entity_id): +async def async_test_on_from_light( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from the light.""" # turn on at light await send_attributes_report(hass, cluster, {1: -1, 0: 1, 2: 2}) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == STATE_ON -async def async_test_on_off_from_hass(hass, cluster, entity_id): +async def async_test_on_off_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test on off functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -1445,7 +406,9 @@ async def async_test_on_off_from_hass(hass, cluster, entity_id): await async_test_off_from_hass(hass, cluster, entity_id) -async def async_test_off_from_hass(hass, cluster, entity_id): +async def 
async_test_off_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test turning off the light from Home Assistant.""" # turn off via UI @@ -1467,9 +430,9 @@ async def async_test_off_from_hass(hass, cluster, entity_id): async def async_test_level_on_off_from_hass( hass: HomeAssistant, - on_off_cluster, - level_cluster, - entity_id, + on_off_cluster: Cluster, + level_cluster: Cluster, + entity_id: str, expected_default_transition: int = 0, ): """Test on off functionality from hass.""" @@ -1549,13 +512,19 @@ async def async_test_level_on_off_from_hass( await async_test_off_from_hass(hass, on_off_cluster, entity_id) -async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected_state): +async def async_test_dimmer_from_light( + hass: HomeAssistant, + cluster: Cluster, + entity_id: str, + level: int, + expected_state: str, +): """Test dimmer functionality from the light.""" await send_attributes_report( hass, cluster, {1: level + 10, 0: level, 2: level - 10 or 22} ) - await async_wait_for_updates(hass) + await hass.async_block_till_done(wait_background_tasks=True) assert hass.states.get(entity_id).state == expected_state # hass uses None for brightness of 0 in state attributes if level == 0: @@ -1563,7 +532,9 @@ async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected assert hass.states.get(entity_id).attributes.get("brightness") == level -async def async_test_flash_from_hass(hass, cluster, entity_id, flash): +async def async_test_flash_from_hass( + hass: HomeAssistant, cluster: Cluster, entity_id: str, flash +): """Test flash functionality from hass.""" # turn on via UI cluster.request.reset_mock() @@ -1603,405 +574,23 @@ async def async_test_flash_from_hass(hass, cluster, entity_id, flash): "zigpy.zcl.clusters.general.OnOff.request", new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), ) -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_zha_group_light_entity( +async def test_light_exception_on_creation( hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device_light_1, - device_light_2, - device_light_3, - coordinator, -) -> None: - """Test the light entity for a ZHA group.""" - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - device_3_entity_id = find_entity_id(Platform.LIGHT, device_light_3, hass) - - assert device_1_entity_id not in (device_2_entity_id, device_3_entity_id) - assert device_2_entity_id != device_3_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert 
hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - assert device_3_entity_id not in zha_group.member_entity_ids - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - group_cluster_level = zha_group.endpoint[general.LevelControl.cluster_id] - group_cluster_identify = zha_group.endpoint[general.Identify.cluster_id] - - dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off - dev3_cluster_on_off = device_light_3.device.endpoints[1].on_off - - dev1_cluster_level = device_light_1.device.endpoints[1].level - - await async_enable_traffic( - hass, [device_light_1, device_light_2, device_light_3], enabled=False - ) - await async_wait_for_updates(hass) - # test that the lights were created and that they are unavailable - assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_light_1, device_light_2, device_light_3]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - assert group_state.attributes["supported_color_modes"] == [ - ColorMode.COLOR_TEMP, - ColorMode.XY, - ] - # Light which is off has no color mode - assert group_state.attributes["color_mode"] is None - - # test turning the lights on and off from the HA - await async_test_on_off_from_hass(hass, group_cluster_on_off, group_entity_id) - - await async_shift_time(hass) - - # test short flashing the lights from the HA - await async_test_flash_from_hass( - hass, group_cluster_identify, group_entity_id, FLASH_SHORT - ) - - await async_shift_time(hass) - - # test turning the lights on and off from the light - await async_test_on_off_from_light(hass, dev1_cluster_on_off, group_entity_id) - - # test turning the lights on and off from the HA - await async_test_level_on_off_from_hass( - hass, - group_cluster_on_off, - group_cluster_level, - group_entity_id, - expected_default_transition=1, # a Sengled light is in that group and needs a minimum 0.1s transition - ) - - await async_shift_time(hass) - - # test getting a brightness change from the network - await async_test_on_from_light(hass, dev1_cluster_on_off, group_entity_id) - await async_test_dimmer_from_light( - hass, dev1_cluster_level, group_entity_id, 150, STATE_ON - ) - # Check state - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_ON - assert group_state.attributes["supported_color_modes"] == [ - ColorMode.COLOR_TEMP, - ColorMode.XY, - ] - assert group_state.attributes["color_mode"] == ColorMode.XY - - # test long flashing the lights from the HA - await async_test_flash_from_hass( - hass, group_cluster_identify, group_entity_id, FLASH_LONG - ) - - await async_shift_time(hass) - - assert len(zha_group.members) == 2 - # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await hass.async_block_till_done() - - # test that group light is on - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - await send_attributes_report(hass, 
dev1_cluster_on_off, {0: 0}) - await hass.async_block_till_done() - - # test that group light is still on - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group light is now off - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group light is now back on - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_ON - - # turn it off to test a new member add being tracked - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - # add a new member and test that his state is also tracked - await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) - await send_attributes_report(hass, dev3_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert device_3_entity_id in zha_group.member_entity_ids - assert len(zha_group.members) == 3 - - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(device_3_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - # make the group have only 1 member and now there should be no entity - await zha_group.async_remove_members( - [GroupMember(device_light_2.ieee, 1), GroupMember(device_light_3.ieee, 1)] - ) - assert len(zha_group.members) == 1 - assert hass.states.get(group_entity_id) is None - assert device_2_entity_id not in zha_group.member_entity_ids - assert device_3_entity_id not in zha_group.member_entity_ids - - # make sure the entity registry entry is still there - assert entity_registry.async_get(group_entity_id) is not None - - # add a member back and ensure that the group entity was created again - await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)]) - await send_attributes_report(hass, dev3_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert len(zha_group.members) == 2 - assert hass.states.get(group_entity_id).state == STATE_ON - - # add a 3rd member and ensure we still have an entity and we track the new one - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await send_attributes_report(hass, dev3_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - assert hass.states.get(group_entity_id).state == STATE_OFF - - # this will test that _reprobe_group is used correctly - await zha_group.async_add_members( - [GroupMember(device_light_2.ieee, 1), GroupMember(coordinator.ieee, 1)] - ) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - assert len(zha_group.members) == 4 - assert hass.states.get(group_entity_id).state == STATE_ON - - await zha_group.async_remove_members([GroupMember(coordinator.ieee, 1)]) - await 
hass.async_block_till_done() - assert hass.states.get(group_entity_id).state == STATE_ON - assert len(zha_group.members) == 3 - - # remove the group and ensure that there is no entity and that the entity registry is cleaned up - assert entity_registry.async_get(group_entity_id) is not None - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - assert hass.states.get(group_entity_id) is None - assert entity_registry.async_get(group_entity_id) is None - - -@patch( - "zigpy.zcl.clusters.general.OnOff.request", - new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]), -) -@patch( - "homeassistant.components.zha.light.ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def test_group_member_assume_state( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, + setup_zha, zigpy_device_mock, - zha_device_joined, - coordinator, - device_light_1, - device_light_2, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test the group members assume state function.""" - with patch_zha_config( - "light", {(ZHA_OPTIONS, CONF_GROUP_MEMBERS_ASSUME_STATE): True} - ): - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - zha_gateway.coordinator_zha_device = coordinator - coordinator._zha_gateway = zha_gateway - device_light_1._zha_gateway = zha_gateway - device_light_2._zha_gateway = zha_gateway - member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee] - members = [ - GroupMember(device_light_1.ieee, 1), - GroupMember(device_light_2.ieee, 1), - ] - - assert coordinator.is_coordinator - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 2 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass) - device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass) - - assert device_1_entity_id != device_2_entity_id - - group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group) - assert hass.states.get(group_entity_id) is not None - - assert device_1_entity_id in zha_group.member_entity_ids - assert device_2_entity_id in zha_group.member_entity_ids - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - - await async_enable_traffic( - hass, [device_light_1, device_light_2], enabled=False - ) - await async_wait_for_updates(hass) - # test that the lights were created and that they are unavailable - assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_light_1, device_light_2]) - await async_wait_for_updates(hass) - - # test that the lights were created and are off - group_state = hass.states.get(group_entity_id) - assert group_state.state == STATE_OFF - - group_cluster_on_off.request.reset_mock() - await async_shift_time(hass) - - # turn on via UI - await hass.services.async_call( - LIGHT_DOMAIN, "turn_on", {"entity_id": group_entity_id}, blocking=True - ) - - # members also instantly assume STATE_ON - assert hass.states.get(device_1_entity_id).state == STATE_ON - assert hass.states.get(device_2_entity_id).state == STATE_ON - assert hass.states.get(group_entity_id).state == STATE_ON - - # turn off via UI - await hass.services.async_call( - 
LIGHT_DOMAIN, "turn_off", {"entity_id": group_entity_id}, blocking=True - ) - - # members also instantly assume STATE_OFF - assert hass.states.get(device_1_entity_id).state == STATE_OFF - assert hass.states.get(device_2_entity_id).state == STATE_OFF - assert hass.states.get(group_entity_id).state == STATE_OFF - - # remove the group and ensure that there is no entity and that the entity registry is cleaned up - assert entity_registry.async_get(group_entity_id) is not None - await zha_gateway.async_remove_zigpy_group(zha_group.group_id) - assert hass.states.get(group_entity_id) is None - assert entity_registry.async_get(group_entity_id) is None - - -@pytest.mark.parametrize( - ("restored_state", "expected_state"), - [ - ( - STATE_ON, - { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": ColorMode.XY, # color_mode defaults to what the light supports when restored with ON state - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - }, - ), - ( - STATE_OFF, - { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": None, - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - }, - ), - ], -) -async def test_restore_light_state( - hass: HomeAssistant, - zigpy_device_mock, - core_rs: Callable[[str, Any, dict[str, Any]], None], - zha_device_restored, - restored_state: str, - expected_state: dict[str, Any], -) -> None: - """Test ZHA light restores without throwing an error when attributes are None.""" - - # restore state with None values - attributes = { - "brightness": None, - "off_with_transition": None, - "off_brightness": None, - "color_mode": None, - "color_temp": None, - "xy_color": None, - "hs_color": None, - "effect": None, - } - - entity_id = "light.fakemanufacturer_fakemodel_light" - core_rs( - entity_id, - state=restored_state, - attributes=attributes, - ) - await async_mock_load_restore_state_from_storage(hass) + """Test ZHA light entity creation exception.""" + await setup_zha() + gateway = get_zha_gateway(hass) zigpy_device = zigpy_device_mock(LIGHT_COLOR) - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.LIGHT, zha_device, hass) - assert entity_id is not None - assert hass.states.get(entity_id).state == restored_state + gateway.get_or_create_device(zigpy_device) + with patch( + "homeassistant.components.zha.light.Light.__init__", side_effect=Exception + ): + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - # compare actual restored state to expected state - for attribute, expected_value in expected_state.items(): - assert hass.states.get(entity_id).attributes.get(attribute) == expected_value + assert "Error while adding entity from entity data" in caplog.text diff --git a/tests/components/zha/test_lock.py b/tests/components/zha/test_lock.py index b16d7a31828..4e1d092af9b 100644 --- a/tests/components/zha/test_lock.py +++ b/tests/components/zha/test_lock.py @@ -3,27 +3,23 @@ from unittest.mock import patch import pytest -import zigpy.profiles.zha +from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import closures, general import zigpy.zcl.foundation as zcl_f from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN -from homeassistant.const import ( - STATE_LOCKED, - STATE_UNAVAILABLE, - STATE_UNLOCKED, - Platform, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + 
ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) +from homeassistant.const import STATE_LOCKED, STATE_UNLOCKED, Platform from homeassistant.core import HomeAssistant -from .common import async_enable_traffic, find_entity_id, send_attributes_report -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE - -LOCK_DOOR = 0 -UNLOCK_DOOR = 1 -SET_PIN_CODE = 5 -CLEAR_PIN_CODE = 7 -SET_USER_STATUS = 9 +from .common import find_entity_id, send_attributes_report +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -40,48 +36,51 @@ def lock_platform_only(): yield -@pytest.fixture -async def lock(hass, zigpy_device_mock, zha_device_joined_restored): - """Lock cluster fixture.""" +async def test_lock(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: + """Test ZHA lock platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [closures.DoorLock.cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.DOOR_LOCK, + SIG_EP_TYPE: zha.DeviceType.DOOR_LOCK, + SIG_EP_PROFILE: zha.PROFILE_ID, } }, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].door_lock + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_lock(hass: HomeAssistant, lock) -> None: - """Test ZHA lock platform.""" - - zha_device, cluster = lock - entity_id = find_entity_id(Platform.LOCK, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.LOCK, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].door_lock assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNLOCKED - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the lock was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to unlocked assert hass.states.get(entity_id).state == STATE_UNLOCKED # set state to locked - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) + await send_attributes_report( + hass, + cluster, + {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Locked}, + ) assert hass.states.get(entity_id).state == STATE_LOCKED # set state to unlocked - await send_attributes_report(hass, cluster, {1: 0, 0: 2, 2: 3}) + await send_attributes_report( + hass, + cluster, + {closures.DoorLock.AttributeDefs.lock_state.id: closures.LockState.Unlocked}, + ) assert hass.states.get(entity_id).state == STATE_UNLOCKED # lock from HA @@ -103,7 +102,7 @@ async def test_lock(hass: HomeAssistant, lock) -> None: await async_disable_user_code(hass, cluster, entity_id) -async def async_lock(hass, cluster, entity_id): +async def async_lock(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -112,10 +111,13 @@ async def async_lock(hass, 
cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == LOCK_DOOR + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.lock_door.id + ) -async def async_unlock(hass, cluster, entity_id): +async def async_unlock(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test lock functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # lock via UI @@ -124,10 +126,13 @@ async def async_unlock(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == UNLOCK_DOOR + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.unlock_door.id + ) -async def async_set_user_code(hass, cluster, entity_id): +async def async_set_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test set lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -139,7 +144,10 @@ async def async_set_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_PIN_CODE + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_pin_code.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled assert ( @@ -148,7 +156,7 @@ async def async_set_user_code(hass, cluster, entity_id): assert cluster.request.call_args[0][6] == "13246579" -async def async_clear_user_code(hass, cluster, entity_id): +async def async_clear_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test clear lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -163,11 +171,14 @@ async def async_clear_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == CLEAR_PIN_CODE + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.clear_pin_code.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 -async def async_enable_user_code(hass, cluster, entity_id): +async def async_enable_user_code(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test enable lock code functionality from hass.""" with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -182,12 +193,17 @@ async def async_enable_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_USER_STATUS + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_user_status.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Enabled -async def async_disable_user_code(hass, cluster, entity_id): +async def async_disable_user_code( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test disable lock code functionality from hass.""" with 
patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]): # set lock code via service call @@ -202,6 +218,9 @@ async def async_disable_user_code(hass, cluster, entity_id): ) assert cluster.request.call_count == 1 assert cluster.request.call_args[0][0] is False - assert cluster.request.call_args[0][1] == SET_USER_STATUS + assert ( + cluster.request.call_args[0][1] + == closures.DoorLock.ServerCommandDefs.set_user_status.id + ) assert cluster.request.call_args[0][3] == 2 # user slot 3 => internal slot 2 assert cluster.request.call_args[0][4] == closures.DoorLock.UserStatus.Disabled diff --git a/tests/components/zha/test_logbook.py b/tests/components/zha/test_logbook.py index 19a6f9d359f..0b27cd095a9 100644 --- a/tests/components/zha/test_logbook.py +++ b/tests/components/zha/test_logbook.py @@ -3,10 +3,16 @@ from unittest.mock import patch import pytest +from zha.application.const import ZHA_EVENT import zigpy.profiles.zha from zigpy.zcl.clusters import general -from homeassistant.components.zha.core.const import ZHA_EVENT +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) from homeassistant.const import CONF_DEVICE_ID, CONF_UNIQUE_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -40,9 +46,13 @@ def sensor_platform_only(): @pytest.fixture -async def mock_devices(hass, zigpy_device_mock, zha_device_joined): +async def mock_devices(hass: HomeAssistant, setup_zha, zigpy_device_mock): """IAS device fixture.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { @@ -54,10 +64,13 @@ async def mock_devices(hass, zigpy_device_mock, zha_device_joined): } ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.update_available(True) - await hass.async_block_till_done() - return zigpy_device, zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + + return zigpy_device, zha_device_proxy async def test_zha_logbook_event_device_with_triggers( @@ -76,7 +89,7 @@ async def test_zha_logbook_event_device_with_triggers( (LONG_RELEASE, LONG_RELEASE): {COMMAND: COMMAND_HOLD}, } - ieee_address = str(zha_device.ieee) + ieee_address = str(zha_device.device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) @@ -153,7 +166,7 @@ async def test_zha_logbook_event_device_no_triggers( """Test ZHA logbook events with device and without triggers.""" zigpy_device, zha_device = mock_devices - ieee_address = str(zha_device.ieee) + ieee_address = str(zha_device.device.ieee) reg_device = device_registry.async_get_device(identifiers={("zha", ieee_address)}) hass.config.components.add("recorder") diff --git a/tests/components/zha/test_number.py b/tests/components/zha/test_number.py index 6b302f9cbd9..180f16e9ae2 100644 --- a/tests/components/zha/test_number.py +++ b/tests/components/zha/test_number.py @@ -3,26 +3,22 @@ from unittest.mock import call, patch import pytest -from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.zcl.clusters import general, lighting +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from homeassistant.components.number 
import DOMAIN as NUMBER_DOMAIN -from homeassistant.components.zha.core.device import ZHADevice -from homeassistant.const import STATE_UNAVAILABLE, EntityCategory, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @@ -43,49 +39,28 @@ def number_platform_only(): yield -@pytest.fixture -def zigpy_analog_output_device(zigpy_device_mock): - """Zigpy analog_output device.""" - - endpoints = { - 1: { - SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH, - SIG_EP_INPUT: [general.AnalogOutput.cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -async def light(zigpy_device_mock): - """Siren fixture.""" - - return zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - general.LevelControl.cluster_id, - lighting.Color.cluster_id, - ], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - -async def test_number( - hass: HomeAssistant, zha_device_joined_restored, zigpy_analog_output_device -) -> None: +async def test_number(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA number platform.""" - cluster = zigpy_analog_output_device.endpoints.get(1).analog_output + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_TYPE: zha.DeviceType.LEVEL_CONTROL_SWITCH, + SIG_EP_INPUT: [ + general.AnalogOutput.cluster_id, + general.Basic.cluster_id, + ], + SIG_EP_OUTPUT: [], + SIG_EP_PROFILE: zha.PROFILE_ID, + } + } + ) + + cluster = zigpy_device.endpoints[1].analog_output cluster.PLUGGED_ATTR_READS = { "max_present_value": 100.0, "min_present_value": 1.0, @@ -98,34 +73,14 @@ async def test_number( update_attribute_cache(cluster) cluster.PLUGGED_ATTR_READS["present_value"] = 15.0 - zha_device = await zha_device_joined_restored(zigpy_analog_output_device) - # one for present_value and one for the rest configuration attributes - assert cluster.read_attributes.call_count == 3 - attr_reads = set() - for call_args in cluster.read_attributes.call_args_list: - attr_reads |= set(call_args[0][0]) - assert "max_present_value" in attr_reads - assert "min_present_value" in attr_reads - assert "relinquish_default" in attr_reads - assert "resolution" in attr_reads - assert "description" in attr_reads - assert "engineering_units" in attr_reads - assert "application_type" in attr_reads + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - entity_id = find_entity_id(Platform.NUMBER, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = 
gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.NUMBER, zha_device_proxy, hass) assert entity_id is not None - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the number was created and that it is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - assert cluster.read_attributes.call_count == 3 - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - assert cluster.read_attributes.call_count == 6 - - # test that the state has changed from unavailable to 15.0 assert hass.states.get(entity_id).state == "15.0" # test attributes @@ -134,13 +89,13 @@ async def test_number( assert hass.states.get(entity_id).attributes.get("step") == 1.1 assert hass.states.get(entity_id).attributes.get("icon") == "mdi:percent" assert hass.states.get(entity_id).attributes.get("unit_of_measurement") == "%" + assert ( hass.states.get(entity_id).attributes.get("friendly_name") == "FakeManufacturer FakeModel Number PWM1" ) # change value from device - assert cluster.read_attributes.call_count == 6 await send_attributes_report(hass, cluster, {0x0055: 15}) assert hass.states.get(entity_id).state == "15.0" @@ -165,16 +120,8 @@ async def test_number( ] cluster.PLUGGED_ATTR_READS["present_value"] = 30.0 - # test rejoin - assert cluster.read_attributes.call_count == 6 - await async_test_rejoin(hass, zigpy_analog_output_device, [cluster], (1,)) - assert hass.states.get(entity_id).state == "30.0" - assert cluster.read_attributes.call_count == 9 - # update device value with failed attribute report cluster.PLUGGED_ATTR_READS["present_value"] = 40.0 - # validate the entity still contains old value - assert hass.states.get(entity_id).state == "30.0" await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() @@ -183,251 +130,4 @@ async def test_number( "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == "40.0" - assert cluster.read_attributes.call_count == 10 assert "present_value" in cluster.read_attributes.call_args[0][0] - - -@pytest.mark.parametrize( - ("attr", "initial_value", "new_value"), - [ - ("on_off_transition_time", 20, 5), - ("on_level", 255, 50), - ("on_transition_time", 5, 1), - ("off_transition_time", 5, 1), - ("default_move_rate", 1, 5), - ("start_up_current_level", 254, 125), - ], -) -async def test_level_control_number( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - light: ZHADevice, - zha_device_joined, - attr: str, - initial_value: int, - new_value: int, -) -> None: - """Test ZHA level control number entities - new join.""" - level_control_cluster = light.endpoints[1].level - level_control_cluster.PLUGGED_ATTR_READS = { - attr: initial_value, - } - zha_device = await zha_device_joined(light) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - qualifier=attr, - ) - assert entity_id is not None - - assert level_control_cluster.read_attributes.mock_calls == [ - call( - [ - "on_off_transition_time", - "on_level", - "on_transition_time", - "off_transition_time", - "default_move_rate", - ], - allow_cache=True, - only_cache=False, - manufacturer=None, - ), - call( - ["start_up_current_level"], - allow_cache=True, - only_cache=False, - manufacturer=None, - ), - call( - [ - "current_level", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - ] - - state = hass.states.get(entity_id) - 
assert state - assert state.state == str(initial_value) - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test number set_value - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert level_control_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None) - ] - - state = hass.states.get(entity_id) - assert state - assert state.state == str(new_value) - - level_control_cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert hass.states.get(entity_id).state == str(initial_value) - assert level_control_cluster.read_attributes.mock_calls == [ - call( - [attr], - allow_cache=False, - only_cache=False, - manufacturer=None, - ) - ] - - level_control_cluster.write_attributes.reset_mock() - level_control_cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert level_control_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - ] - assert hass.states.get(entity_id).state == str(initial_value) - - -@pytest.mark.parametrize( - ("attr", "initial_value", "new_value"), - [("start_up_color_temperature", 500, 350)], -) -async def test_color_number( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - light: ZHADevice, - zha_device_joined, - attr: str, - initial_value: int, - new_value: int, -) -> None: - """Test ZHA color number entities - new join.""" - color_cluster = light.endpoints[1].light_color - color_cluster.PLUGGED_ATTR_READS = { - attr: initial_value, - } - zha_device = await zha_device_joined(light) - - entity_id = find_entity_id( - Platform.NUMBER, - zha_device, - hass, - qualifier=attr, - ) - assert entity_id is not None - - assert color_cluster.read_attributes.call_count == 3 - assert ( - call( - [ - "color_temp_physical_min", - "color_temp_physical_max", - "color_capabilities", - "start_up_color_temperature", - "options", - ], - allow_cache=True, - only_cache=False, - manufacturer=None, - ) - in color_cluster.read_attributes.call_args_list - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == str(initial_value) - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test number set_value - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert color_cluster.write_attributes.call_count == 1 - assert color_cluster.write_attributes.call_args[0][0] == { - attr: new_value, - } - - state = hass.states.get(entity_id) - assert state - assert state.state == str(new_value) - - color_cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", 
"update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert hass.states.get(entity_id).state == str(initial_value) - assert color_cluster.read_attributes.call_count == 1 - assert ( - call( - [attr], - allow_cache=False, - only_cache=False, - manufacturer=None, - ) - in color_cluster.read_attributes.call_args_list - ) - - color_cluster.write_attributes.reset_mock() - color_cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - "number", - "set_value", - { - "entity_id": entity_id, - "value": new_value, - }, - blocking=True, - ) - - assert color_cluster.write_attributes.mock_calls == [ - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - call({attr: new_value}, manufacturer=None), - ] - assert hass.states.get(entity_id).state == str(initial_value) diff --git a/tests/components/zha/test_radio_manager.py b/tests/components/zha/test_radio_manager.py index 280b3d05daf..0a51aaa6dba 100644 --- a/tests/components/zha/test_radio_manager.py +++ b/tests/components/zha/test_radio_manager.py @@ -1,10 +1,11 @@ """Tests for ZHA config flow.""" +from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, create_autospec, patch import pytest import serial.tools.list_ports -from typing_extensions import Generator +from zha.application.const import RadioType from zigpy.backups import BackupManager import zigpy.config from zigpy.config import CONF_DEVICE_PATH @@ -12,7 +13,7 @@ import zigpy.types from homeassistant.components.usb import UsbServiceInfo from homeassistant.components.zha import radio_manager -from homeassistant.components.zha.core.const import DOMAIN, RadioType +from homeassistant.components.zha.const import DOMAIN from homeassistant.components.zha.radio_manager import ProbeResult, ZhaRadioManager from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/zha/test_registries.py b/tests/components/zha/test_registries.py deleted file mode 100644 index 2b1c0dcc561..00000000000 --- a/tests/components/zha/test_registries.py +++ /dev/null @@ -1,602 +0,0 @@ -"""Test ZHA registries.""" - -from __future__ import annotations - -from unittest import mock - -import pytest -from typing_extensions import Generator -import zigpy.quirks as zigpy_quirks - -from homeassistant.components.zha.binary_sensor import IASZone -from homeassistant.components.zha.core import registries -from homeassistant.components.zha.core.const import ATTR_QUIRK_ID -from homeassistant.components.zha.entity import ZhaEntity -from homeassistant.helpers import entity_registry as er - -MANUFACTURER = "mock manufacturer" -MODEL = "mock model" -QUIRK_CLASS = "mock.test.quirk.class" -QUIRK_ID = "quirk_id" - - -@pytest.fixture -def zha_device(): - """Return a mock of ZHA device.""" - dev = mock.MagicMock() - dev.manufacturer = MANUFACTURER - dev.model = MODEL - dev.quirk_class = QUIRK_CLASS - dev.quirk_id = QUIRK_ID - return dev - - -@pytest.fixture -def cluster_handlers(cluster_handler): - """Return a mock of cluster_handlers.""" - - return [cluster_handler("level", 8), cluster_handler("on_off", 6)] - - -@pytest.mark.parametrize( - ("rule", "matched"), - [ - (registries.MatchRule(), False), - (registries.MatchRule(cluster_handler_names={"level"}), True), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), - 
(registries.MatchRule(cluster_handler_names={"on_off"}), True), - (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), - ( - registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), - False, - ), - # test generic_id matching - (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), - (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} - ), - True, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - } - ), - False, - ), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - ), - True, - ), - # manufacturer matching - (registries.MatchRule(manufacturers="no match"), False), - (registries.MatchRule(manufacturers=MANUFACTURER), True), - ( - registries.MatchRule( - manufacturers="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - ( - registries.MatchRule( - manufacturers=MANUFACTURER, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - (registries.MatchRule(models=MODEL), True), - (registries.MatchRule(models="no match"), False), - ( - registries.MatchRule( - models=MODEL, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - ( - registries.MatchRule( - models="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - (registries.MatchRule(quirk_ids=QUIRK_ID), True), - (registries.MatchRule(quirk_ids="no match"), False), - ( - registries.MatchRule( - quirk_ids=QUIRK_ID, aux_cluster_handlers="aux_cluster_handler" - ), - True, - ), - ( - registries.MatchRule( - quirk_ids="no match", aux_cluster_handlers="aux_cluster_handler" - ), - False, - ), - # match everything - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - manufacturers=MANUFACTURER, - models=MODEL, - quirk_ids=QUIRK_ID, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers={"random manuf", MANUFACTURER}, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers={"random manuf", "Another manuf"}, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers=lambda x: x == MANUFACTURER, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - manufacturers=lambda x: x != MANUFACTURER, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models={"random model", MODEL} - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models={"random model", "Another model"} - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models=lambda x: x == MODEL - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", models=lambda x: x != MODEL - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - quirk_ids={"random quirk", QUIRK_ID}, - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", - quirk_ids={"random quirk", "another quirk"}, - ), - False, - ), - ( - registries.MatchRule( - cluster_handler_names="on_off", quirk_ids=lambda x: x == QUIRK_ID - ), - True, - ), - ( - registries.MatchRule( - 
cluster_handler_names="on_off", quirk_ids=lambda x: x != QUIRK_ID - ), - False, - ), - ( - registries.MatchRule(cluster_handler_names="on_off", quirk_ids=QUIRK_ID), - True, - ), - ], -) -def test_registry_matching(rule, matched, cluster_handlers) -> None: - """Test strict rule matching.""" - assert ( - rule.strict_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched - ) - - -@pytest.mark.parametrize( - ("rule", "matched"), - [ - (registries.MatchRule(), False), - (registries.MatchRule(cluster_handler_names={"level"}), True), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off"}), True), - (registries.MatchRule(cluster_handler_names={"on_off", "no match"}), False), - (registries.MatchRule(cluster_handler_names={"on_off", "level"}), True), - ( - registries.MatchRule(cluster_handler_names={"on_off", "level", "no match"}), - False, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, models="no match" - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, - models="no match", - manufacturers="no match", - ), - True, - ), - ( - registries.MatchRule( - cluster_handler_names={"on_off", "level"}, - models="no match", - manufacturers=MANUFACTURER, - ), - True, - ), - # test generic_id matching - (registries.MatchRule(generic_ids={"cluster_handler_0x0006"}), True), - (registries.MatchRule(generic_ids={"cluster_handler_0x0008"}), True), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"} - ), - True, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - } - ), - False, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - }, - models="mo match", - ), - False, - ), - ( - registries.MatchRule( - generic_ids={ - "cluster_handler_0x0006", - "cluster_handler_0x0008", - "cluster_handler_0x0009", - }, - models=MODEL, - ), - True, - ), - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - ), - True, - ), - # manufacturer matching - (registries.MatchRule(manufacturers="no match"), False), - (registries.MatchRule(manufacturers=MANUFACTURER), True), - (registries.MatchRule(models=MODEL), True), - (registries.MatchRule(models="no match"), False), - (registries.MatchRule(quirk_ids=QUIRK_ID), True), - (registries.MatchRule(quirk_ids="no match"), False), - # match everything - ( - registries.MatchRule( - generic_ids={"cluster_handler_0x0006", "cluster_handler_0x0008"}, - cluster_handler_names={"on_off", "level"}, - manufacturers=MANUFACTURER, - models=MODEL, - quirk_ids=QUIRK_ID, - ), - True, - ), - ], -) -def test_registry_loose_matching(rule, matched, cluster_handlers) -> None: - """Test loose rule matching.""" - assert ( - rule.loose_matched(MANUFACTURER, MODEL, cluster_handlers, QUIRK_ID) is matched - ) - - -def test_match_rule_claim_cluster_handlers_color(cluster_handler) -> None: - """Test cluster handler claiming.""" - ch_color = cluster_handler("color", 0x300) - ch_level = cluster_handler("level", 8) - ch_onoff = cluster_handler("on_off", 6) - - rule = registries.MatchRule( - cluster_handler_names="on_off", aux_cluster_handlers={"color", "level"} - ) - claimed = rule.claim_cluster_handlers([ch_color, ch_level, ch_onoff]) - assert {"color", "level", "on_off"} == 
{ch.name for ch in claimed} - - -@pytest.mark.parametrize( - ("rule", "match"), - [ - (registries.MatchRule(cluster_handler_names={"level"}), {"level"}), - (registries.MatchRule(cluster_handler_names={"level", "no match"}), {"level"}), - (registries.MatchRule(cluster_handler_names={"on_off"}), {"on_off"}), - (registries.MatchRule(generic_ids="cluster_handler_0x0000"), {"basic"}), - ( - registries.MatchRule( - cluster_handler_names="level", generic_ids="cluster_handler_0x0000" - ), - {"basic", "level"}, - ), - ( - registries.MatchRule(cluster_handler_names={"level", "power"}), - {"level", "power"}, - ), - ( - registries.MatchRule( - cluster_handler_names={"level", "on_off"}, - aux_cluster_handlers={"basic", "power"}, - ), - {"basic", "level", "on_off", "power"}, - ), - (registries.MatchRule(cluster_handler_names={"color"}), set()), - ], -) -def test_match_rule_claim_cluster_handlers( - rule, match, cluster_handler, cluster_handlers -) -> None: - """Test cluster handler claiming.""" - ch_basic = cluster_handler("basic", 0) - cluster_handlers.append(ch_basic) - ch_power = cluster_handler("power", 1) - cluster_handlers.append(ch_power) - - claimed = rule.claim_cluster_handlers(cluster_handlers) - assert match == {ch.name for ch in claimed} - - -@pytest.fixture -def entity_registry(): - """Registry fixture.""" - return registries.ZHAEntityRegistry() - - -@pytest.mark.parametrize( - ("manufacturer", "model", "quirk_id", "match_name"), - [ - ("random manufacturer", "random model", "random.class", "OnOff"), - ("random manufacturer", MODEL, "random.class", "OnOffModel"), - (MANUFACTURER, "random model", "random.class", "OnOffManufacturer"), - ("random manufacturer", "random model", QUIRK_ID, "OnOffQuirk"), - (MANUFACTURER, MODEL, "random.class", "OnOffModelManufacturer"), - (MANUFACTURER, "some model", "random.class", "OnOffMultimodel"), - ], -) -def test_weighted_match( - cluster_handler, - entity_registry: er.EntityRegistry, - manufacturer, - model, - quirk_id, - match_name, -) -> None: - """Test weightedd match.""" - - s = mock.sentinel - - @entity_registry.strict_match( - s.component, - cluster_handler_names="on_off", - models={MODEL, "another model", "some model"}, - ) - class OnOffMultimodel: - pass - - @entity_registry.strict_match(s.component, cluster_handler_names="on_off") - class OnOff: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", manufacturers=MANUFACTURER - ) - class OnOffManufacturer: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", models=MODEL - ) - class OnOffModel: - pass - - @entity_registry.strict_match( - s.component, - cluster_handler_names="on_off", - models=MODEL, - manufacturers=MANUFACTURER, - ) - class OnOffModelManufacturer: - pass - - @entity_registry.strict_match( - s.component, cluster_handler_names="on_off", quirk_ids=QUIRK_ID - ) - class OnOffQuirk: - pass - - ch_on_off = cluster_handler("on_off", 6) - ch_level = cluster_handler("level", 8) - - match, claimed = entity_registry.get_entity( - s.component, manufacturer, model, [ch_on_off, ch_level], quirk_id - ) - - assert match.__name__ == match_name - assert claimed == [ch_on_off] - - -def test_multi_sensor_match( - cluster_handler, entity_registry: er.EntityRegistry -) -> None: - """Test multi-entity match.""" - - s = mock.sentinel - - @entity_registry.multipass_match( - s.binary_sensor, - cluster_handler_names="smartenergy_metering", - ) - class SmartEnergySensor2: - pass - - ch_se = cluster_handler("smartenergy_metering", 0x0702) - 
ch_illuminati = cluster_handler("illuminance", 0x0401) - - match, claimed = entity_registry.get_multi_entity( - "manufacturer", - "model", - cluster_handlers=[ch_se, ch_illuminati], - quirk_id="quirk_id", - ) - - assert s.binary_sensor in match - assert s.component not in match - assert set(claimed) == {ch_se} - assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == { - SmartEnergySensor2.__name__ - } - - @entity_registry.multipass_match( - s.component, - cluster_handler_names="smartenergy_metering", - aux_cluster_handlers="illuminance", - ) - class SmartEnergySensor1: - pass - - @entity_registry.multipass_match( - s.binary_sensor, - cluster_handler_names="smartenergy_metering", - aux_cluster_handlers="illuminance", - ) - class SmartEnergySensor3: - pass - - match, claimed = entity_registry.get_multi_entity( - "manufacturer", - "model", - cluster_handlers={ch_se, ch_illuminati}, - quirk_id="quirk_id", - ) - - assert s.binary_sensor in match - assert s.component in match - assert set(claimed) == {ch_se, ch_illuminati} - assert {cls.entity_class.__name__ for cls in match[s.binary_sensor]} == { - SmartEnergySensor2.__name__, - SmartEnergySensor3.__name__, - } - assert {cls.entity_class.__name__ for cls in match[s.component]} == { - SmartEnergySensor1.__name__ - } - - -def iter_all_rules() -> Generator[tuple[registries.MatchRule, list[type[ZhaEntity]]]]: - """Iterate over all match rules and their corresponding entities.""" - - for rules in registries.ZHA_ENTITIES._strict_registry.values(): - for rule, entity in rules.items(): - yield rule, [entity] - - for rules in registries.ZHA_ENTITIES._multi_entity_registry.values(): - for multi in rules.values(): - for rule, entities in multi.items(): - yield rule, entities - - for rules in registries.ZHA_ENTITIES._config_diagnostic_entity_registry.values(): - for multi in rules.values(): - for rule, entities in multi.items(): - yield rule, entities - - -def test_quirk_classes() -> None: - """Make sure that all quirk IDs in components matches exist.""" - - def quirk_class_validator(value): - """Validate quirk IDs during self test.""" - if callable(value): - # Callables cannot be tested - return - - if isinstance(value, (frozenset, set, list)): - for v in value: - # Unpack the value if needed - quirk_class_validator(v) - return - - if value not in all_quirk_ids: - raise ValueError(f"Quirk ID '{value}' does not exist.") - - # get all quirk ID from zigpy quirks registry - all_quirk_ids = [] - for manufacturer in zigpy_quirks._DEVICE_REGISTRY._registry.values(): - for model_quirk_list in manufacturer.values(): - for quirk in model_quirk_list: - quirk_id = getattr(quirk, ATTR_QUIRK_ID, None) - if quirk_id is not None and quirk_id not in all_quirk_ids: - all_quirk_ids.append(quirk_id) - # pylint: disable-next=undefined-loop-variable - del quirk, model_quirk_list, manufacturer - - # validate all quirk IDs used in component match rules - for rule, _ in iter_all_rules(): - quirk_class_validator(rule.quirk_ids) - - -def test_entity_names() -> None: - """Make sure that all handlers expose entities with valid names.""" - - for _, entity_classes in iter_all_rules(): - for entity_class in entity_classes: - if hasattr(entity_class, "__attr_name"): - # The entity has a name - assert (name := entity_class.__attr_name) and isinstance(name, str) - elif hasattr(entity_class, "__attr_translation_key"): - assert ( - isinstance(entity_class.__attr_translation_key, str) - and entity_class.__attr_translation_key - ) - elif hasattr(entity_class, 
"__attr_device_class"): - assert entity_class.__attr_device_class - else: - # The only exception (for now) is IASZone - assert entity_class is IASZone diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index c093fe266bd..c2925161748 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -16,7 +16,7 @@ from homeassistant.components.homeassistant_sky_connect.const import ( # pylint DOMAIN as SKYCONNECT_DOMAIN, ) from homeassistant.components.repairs import DOMAIN as REPAIRS_DOMAIN -from homeassistant.components.zha.core.const import DOMAIN +from homeassistant.components.zha.const import DOMAIN from homeassistant.components.zha.repairs.network_settings_inconsistent import ( ISSUE_INCONSISTENT_NETWORK_SETTINGS, ) @@ -148,7 +148,7 @@ async def test_multipan_firmware_repair( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), patch( @@ -199,7 +199,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -236,7 +236,7 @@ async def test_multipan_firmware_retry_on_probe_ezsp( autospec=True, ), patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -311,7 +311,7 @@ async def test_inconsistent_settings_keep_new( old_state = network_backup with patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, @@ -390,7 +390,7 @@ async def test_inconsistent_settings_restore_old( old_state = network_backup with patch( - "homeassistant.components.zha.core.gateway.ZHAGateway.async_initialize", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, diff --git a/tests/components/zha/test_select.py b/tests/components/zha/test_select.py index 70f58ee4e6d..f0f742503e3 100644 --- a/tests/components/zha/test_select.py +++ b/tests/components/zha/test_select.py @@ -1,34 +1,30 @@ """Test ZHA select entities.""" -from typing import Any -from unittest.mock import call, patch +from unittest.mock import patch import pytest -from zhaquirks import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zigpy.const import SIG_EP_PROFILE +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha -from zigpy.quirks import CustomCluster, CustomDevice -from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -import zigpy.types as t from zigpy.zcl.clusters import general, security -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster -from homeassistant.components.zha.select import AqaraMotionSensitivities -from homeassistant.const import STATE_UNKNOWN, EntityCategory, Platform -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, restore_state -from homeassistant.util import dt as dt_util +from homeassistant.components.zha.helpers 
import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import ( + STATE_UNAVAILABLE, + STATE_UNKNOWN, + EntityCategory, + Platform, +) +from homeassistant.core import HomeAssistant, State +from homeassistant.helpers import entity_registry as er -from .common import async_enable_traffic, find_entity_id, send_attributes_report -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id -from tests.common import async_mock_load_restore_state_from_storage +from tests.common import mock_restore_cache @pytest.fixture(autouse=True) @@ -50,9 +46,17 @@ def select_select_only(): yield -@pytest.fixture -async def siren(hass, zigpy_device_mock, zha_device_joined_restored): - """Siren fixture.""" +async def test_select( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha, + zigpy_device_mock, +) -> None: + """Test ZHA select platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { @@ -62,75 +66,16 @@ async def siren(hass, zigpy_device_mock, zha_device_joined_restored): SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].ias_wd - - -@pytest.fixture -async def light(hass, zigpy_device_mock): - """Siren fixture.""" - - return zigpy_device_mock( - { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.Identify.cluster_id, - general.OnOff.cluster_id, - ], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - } - }, - node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - ) - - -@pytest.fixture -def core_rs(hass_storage: dict[str, Any]): - """Core.restore_state fixture.""" - - def _storage(entity_id, state): - now = dt_util.utcnow().isoformat() - - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], } + ) - return _storage + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_select( - hass: HomeAssistant, entity_registry: er.EntityRegistry, siren -) -> None: - """Test ZHA select platform.""" - zha_device, cluster = siren - assert cluster is not None + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier="tone", + Platform.SELECT, zha_device_proxy, hass, qualifier="tone" ) assert entity_id is not None @@ -167,17 +112,32 @@ async def test_select( assert state.state == security.IasWd.Warning.WarningMode.Burglar.name +@pytest.mark.parametrize( + ("restored_state", "expected_state"), + [ + # Unavailable is not restored + (STATE_UNAVAILABLE, STATE_UNKNOWN), + # Normal state is + ( + security.IasWd.Warning.WarningMode.Burglar.name, + security.IasWd.Warning.WarningMode.Burglar.name, + ), + ], +) async def test_select_restore_state( hass: HomeAssistant, + entity_registry: er.EntityRegistry, + setup_zha, 
zigpy_device_mock, - core_rs, - zha_device_restored, + restored_state: str, + expected_state: str, ) -> None: - """Test ZHA select entity restore state.""" - + """Test ZHA select platform restore state.""" entity_id = "select.fakemanufacturer_fakemodel_default_siren_tone" - core_rs(entity_id, state="Burglar") - await async_mock_load_restore_state_from_storage(hass) + + mock_restore_cache(hass, [State(entity_id, restored_state)]) + + await setup_zha() zigpy_device = zigpy_device_mock( { @@ -187,307 +147,14 @@ async def test_select_restore_state( SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, - ) - - zha_device = await zha_device_restored(zigpy_device) - cluster = zigpy_device.endpoints[1].ias_wd - assert cluster is not None - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier="tone", - ) - - assert entity_id is not None - state = hass.states.get(entity_id) - assert state - assert state.state == security.IasWd.Warning.WarningMode.Burglar.name - - -async def test_on_off_select_new_join( - hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_joined -) -> None: - """Test ZHA on off select - new join.""" - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.PLUGGED_ATTR_READS = { - "start_up_on_off": general.OnOff.StartUpOnOff.On - } - zha_device = await zha_device_joined(light) - select_name = "start_up_behavior" - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name, - ) - assert entity_id is not None - - assert on_off_cluster.read_attributes.call_count == 2 - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.On.name - - assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - # Test select option with string value - await hass.services.async_call( - "select", - "select_option", - { - "entity_id": entity_id, - "option": general.OnOff.StartUpOnOff.Off.name, - }, - blocking=True, - ) - - assert on_off_cluster.write_attributes.call_count == 1 - assert on_off_cluster.write_attributes.call_args[0][0] == { - "start_up_on_off": general.OnOff.StartUpOnOff.Off - } - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.Off.name - - -async def test_on_off_select_restored( - hass: HomeAssistant, entity_registry: er.EntityRegistry, light, zha_device_restored -) -> None: - """Test ZHA on off select - restored.""" - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.PLUGGED_ATTR_READS = { - "start_up_on_off": general.OnOff.StartUpOnOff.On - } - zha_device = await zha_device_restored(light) - - assert zha_device.is_mains_powered - - assert on_off_cluster.read_attributes.call_count == 4 - # first 2 calls hit cache only - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=True, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=True, only_cache=True, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - # 2nd set 
of calls can actually read from the device - assert ( - call(["start_up_on_off"], allow_cache=True, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - assert ( - call(["on_off"], allow_cache=False, only_cache=False, manufacturer=None) - in on_off_cluster.read_attributes.call_args_list - ) - - select_name = "start_up_behavior" - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name, - ) - assert entity_id is not None - - state = hass.states.get(entity_id) - assert state - assert state.state == general.OnOff.StartUpOnOff.On.name - assert state.attributes["options"] == ["Off", "On", "Toggle", "PreviousValue"] - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - - -async def test_on_off_select_unsupported( - hass: HomeAssistant, light, zha_device_joined_restored -) -> None: - """Test ZHA on off select unsupported.""" - - on_off_cluster = light.endpoints[1].on_off - on_off_cluster.add_unsupported_attribute("start_up_on_off") - zha_device = await zha_device_joined_restored(light) - select_name = general.OnOff.StartUpOnOff.__name__ - entity_id = find_entity_id( - Platform.SELECT, - zha_device, - hass, - qualifier=select_name.lower(), - ) - assert entity_id is None - - -class MotionSensitivityQuirk(CustomDevice): - """Quirk with motion sensitivity attribute.""" - - class OppleCluster(CustomCluster, ManufacturerSpecificCluster): - """Aqara manufacturer specific cluster.""" - - cluster_id = 0xFCC0 - ep_attribute = "opple_cluster" - attributes = { - 0x010C: ("motion_sensitivity", t.uint8_t, True), - 0x020C: ("motion_sensitivity_disabled", t.uint8_t, True), } - - def __init__(self, *args, **kwargs): - """Initialize.""" - super().__init__(*args, **kwargs) - # populate cache to create config entity - self._attr_cache.update( - { - 0x010C: AqaraMotionSensitivities.Medium, - 0x020C: AqaraMotionSensitivities.Medium, - } - ) - - replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - INPUT_CLUSTERS: [general.Basic.cluster_id, OppleCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def zigpy_device_aqara_sensor(hass, zigpy_device_mock, zha_device_joined): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="LUMI", - model="lumi.motion.ac02", - quirk=MotionSensitivityQuirk, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zigpy_device + gateway = get_zha_gateway(hass) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_on_off_select_attribute_report( - hass: HomeAssistant, light, zha_device_restored, zigpy_device_aqara_sensor -) -> None: - """Test ZHA attribute report parsing for select platform.""" - - zha_device = await zha_device_restored(zigpy_device_aqara_sensor) - cluster = zigpy_device_aqara_sensor.endpoints.get(1).opple_cluster - entity_id = find_entity_id(Platform.SELECT, zha_device, hass) - assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state is 
in default medium state - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name - - # send attribute report from device - await send_attributes_report( - hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} - ) - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name - - -( - add_to_registry_v2("Fake_Manufacturer", "Fake_Model") - .replaces(MotionSensitivityQuirk.OppleCluster) - .enum( - "motion_sensitivity", - AqaraMotionSensitivities, - MotionSensitivityQuirk.OppleCluster.cluster_id, - ) - .enum( - "motion_sensitivity_disabled", - AqaraMotionSensitivities, - MotionSensitivityQuirk.OppleCluster.cluster_id, - translation_key="motion_sensitivity", - initially_disabled=True, - ) -) - - -@pytest.fixture -async def zigpy_device_aqara_sensor_v2( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - MotionSensitivityQuirk.OppleCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="Fake_Manufacturer", - model="Fake_Model", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].opple_cluster - - -async def test_on_off_select_attribute_report_v2( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - zigpy_device_aqara_sensor_v2, -) -> None: - """Test ZHA attribute report parsing for select platform.""" - - zha_device, cluster = zigpy_device_aqara_sensor_v2 - assert isinstance(zha_device.device, CustomDeviceV2) - entity_id = find_entity_id( - Platform.SELECT, zha_device, hass, qualifier="motion_sensitivity" - ) - assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state is in default medium state - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Medium.name - - # send attribute report from device - await send_attributes_report( - hass, cluster, {"motion_sensitivity": AqaraMotionSensitivities.Low} - ) - assert hass.states.get(entity_id).state == AqaraMotionSensitivities.Low.name - - entity_entry = entity_registry.async_get(entity_id) - assert entity_entry - assert entity_entry.entity_category == EntityCategory.CONFIG - assert entity_entry.disabled is False - assert entity_entry.translation_key == "motion_sensitivity" + state = hass.states.get(entity_id) + assert state + assert state.state == expected_state diff --git a/tests/components/zha/test_sensor.py b/tests/components/zha/test_sensor.py index 8443c4ced07..2d69cf1ff36 100644 --- a/tests/components/zha/test_sensor.py +++ b/tests/components/zha/test_sensor.py @@ -1,33 +1,20 @@ """Test ZHA sensor.""" -from collections.abc import Callable -from datetime import timedelta -import math -from typing import Any -from unittest.mock import MagicMock, patch +from unittest.mock import patch import pytest -from zhaquirks.danfoss import thermostat as danfoss_thermostat -import zigpy.profiles.zha -from zigpy.quirks import CustomCluster -from zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -from zigpy.quirks.v2.homeassistant import UnitOfMass -import zigpy.types as t +from zigpy.profiles import zha +from zigpy.zcl import Cluster from zigpy.zcl.clusters import general, homeautomation, hvac, measurement, smartenergy from zigpy.zcl.clusters.hvac import Thermostat -from 
zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.components.zha.core import ZHADevice -from homeassistant.components.zha.core.const import ZHA_CLUSTER_HANDLER_READS_PER_REQ -import homeassistant.config as config_util +from homeassistant.components.zha.helpers import get_zha_gateway from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_UNIT_OF_MEASUREMENT, - CONF_UNIT_SYSTEM, LIGHT_LUX, PERCENTAGE, - STATE_UNAVAILABLE, STATE_UNKNOWN, Platform, UnitOfApparentPower, @@ -37,29 +24,12 @@ from homeassistant.const import ( UnitOfPower, UnitOfPressure, UnitOfTemperature, - UnitOfVolume, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er, restore_state -from homeassistant.helpers.entity_component import async_update_entity -from homeassistant.util import dt as dt_util -from .common import ( - async_enable_traffic, - async_test_rejoin, - find_entity_id, - find_entity_ids, - send_attribute_report, - send_attributes_report, -) +from .common import send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import ( - MockConfigEntry, - async_fire_time_changed, - async_mock_load_restore_state_from_storage, -) - ENTITY_ID_PREFIX = "sensor.fakemanufacturer_fakemodel_{}" @@ -76,60 +46,19 @@ def sensor_platform_only(): yield -@pytest.fixture -async def elec_measurement_zigpy_dev(hass: HomeAssistant, zigpy_device_mock): - """Electric Measurement zigpy device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - homeautomation.ElectricalMeasurement.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - zigpy_device.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS = { - "ac_current_divisor": 10, - "ac_current_multiplier": 1, - "ac_power_divisor": 10, - "ac_power_multiplier": 1, - "ac_voltage_divisor": 10, - "ac_voltage_multiplier": 1, - "measurement_type": 8, - "power_divisor": 10, - "power_multiplier": 1, - } - return zigpy_device - - -@pytest.fixture -async def elec_measurement_zha_dev(elec_measurement_zigpy_dev, zha_device_joined): - """Electric Measurement ZHA device.""" - - zha_dev = await zha_device_joined(elec_measurement_zigpy_dev) - zha_dev.available = True - return zha_dev - - -async def async_test_humidity(hass: HomeAssistant, cluster, entity_id): +async def async_test_humidity(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test humidity sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 100}) assert_state(hass, entity_id, "10.0", PERCENTAGE) -async def async_test_temperature(hass: HomeAssistant, cluster, entity_id): +async def async_test_temperature(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 2900, 2: 100}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_pressure(hass: HomeAssistant, cluster, entity_id): +async def async_test_pressure(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test pressure sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 1000, 2: 10000}) assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) @@ -138,7 +67,7 @@ async def 
async_test_pressure(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "1000", UnitOfPressure.HPA) -async def async_test_illuminance(hass: HomeAssistant, cluster, entity_id): +async def async_test_illuminance(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test illuminance sensor.""" await send_attributes_report(hass, cluster, {1: 1, 0: 10, 2: 20}) assert_state(hass, entity_id, "1", LIGHT_LUX) @@ -150,7 +79,7 @@ async def async_test_illuminance(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "unknown", LIGHT_LUX) -async def async_test_metering(hass: HomeAssistant, cluster, entity_id): +async def async_test_metering(hass: HomeAssistant, cluster: Cluster, entity_id: str): """Test Smart Energy metering sensor.""" await send_attributes_report(hass, cluster, {1025: 1, 1024: 12345, 1026: 100}) assert_state(hass, entity_id, "12345.0", None) @@ -159,13 +88,14 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): await send_attributes_report(hass, cluster, {1024: 12346, "status": 64 + 8}) assert_state(hass, entity_id, "12346.0", None) + assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|POWER_FAILURE", "POWER_FAILURE|SERVICE_DISCONNECT", ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 1} + hass, cluster, {"metering_device_type": 1, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|NOT_DEFINED", @@ -173,7 +103,7 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 2} + hass, cluster, {"metering_device_type": 2, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|PIPE_EMPTY", @@ -181,7 +111,7 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): ) await send_attributes_report( - hass, cluster, {"status": 64 + 8, "metering_device_type": 5} + hass, cluster, {"metering_device_type": 5, "status": 64 + 8} ) assert hass.states.get(entity_id).attributes["status"] in ( "SERVICE_DISCONNECT|TEMPERATURE_SENSOR", @@ -190,13 +120,13 @@ async def async_test_metering(hass: HomeAssistant, cluster, entity_id): # Status for other meter types await send_attributes_report( - hass, cluster, {"status": 32, "metering_device_type": 4} + hass, cluster, {"metering_device_type": 4, "status": 32} ) assert hass.states.get(entity_id).attributes["status"] in ("", "32") async def async_test_smart_energy_summation_delivered( - hass: HomeAssistant, cluster, entity_id + hass: HomeAssistant, cluster: Cluster, entity_id: str ): """Test SmartEnergy Summation delivered sensor.""" @@ -213,7 +143,7 @@ async def async_test_smart_energy_summation_delivered( async def async_test_smart_energy_summation_received( - hass: HomeAssistant, cluster, entity_id + hass: HomeAssistant, cluster: Cluster, entity_id: str ): """Test SmartEnergy Summation received sensor.""" @@ -229,7 +159,9 @@ async def async_test_smart_energy_summation_received( ) -async def async_test_electrical_measurement(hass: HomeAssistant, cluster, entity_id): +async def async_test_electrical_measurement( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -248,10 +180,12 @@ async def async_test_electrical_measurement(hass: HomeAssistant, cluster, entity 
assert "active_power_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050D: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["active_power_max"] == "8.8" + assert hass.states.get(entity_id).attributes["active_power_max"] == 8.8 -async def async_test_em_apparent_power(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_apparent_power( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement Apparent Power sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -269,7 +203,9 @@ async def async_test_em_apparent_power(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "9.9", UnitOfApparentPower.VOLT_AMPERE) -async def async_test_em_power_factor(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_power_factor( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement Power Factor sensor.""" # update divisor cached value await send_attributes_report(hass, cluster, {"ac_power_divisor": 1}) @@ -287,7 +223,9 @@ async def async_test_em_power_factor(hass: HomeAssistant, cluster, entity_id): assert_state(hass, entity_id, "99", PERCENTAGE) -async def async_test_em_rms_current(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_rms_current( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement RMS Current sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0508: 1234, 10: 1000}) @@ -302,10 +240,12 @@ async def async_test_em_rms_current(hass: HomeAssistant, cluster, entity_id): assert "rms_current_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x050A: 88, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_current_max"] == "8.8" + assert hass.states.get(entity_id).attributes["rms_current_max"] == 8.8 -async def async_test_em_rms_voltage(hass: HomeAssistant, cluster, entity_id): +async def async_test_em_rms_voltage( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test electrical measurement RMS Voltage sensor.""" await send_attributes_report(hass, cluster, {0: 1, 0x0505: 1234, 10: 1000}) @@ -320,10 +260,12 @@ async def async_test_em_rms_voltage(hass: HomeAssistant, cluster, entity_id): assert "rms_voltage_max" not in hass.states.get(entity_id).attributes await send_attributes_report(hass, cluster, {0: 1, 0x0507: 888, 10: 5000}) - assert hass.states.get(entity_id).attributes["rms_voltage_max"] == "8.9" + assert hass.states.get(entity_id).attributes["rms_voltage_max"] == 8.9 -async def async_test_powerconfiguration(hass: HomeAssistant, cluster, entity_id): +async def async_test_powerconfiguration( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: 98}) assert_state(hass, entity_id, "49", "%") @@ -334,7 +276,9 @@ async def async_test_powerconfiguration(hass: HomeAssistant, cluster, entity_id) assert hass.states.get(entity_id).attributes["battery_voltage"] == 2.0 -async def async_test_powerconfiguration2(hass: HomeAssistant, cluster, entity_id): +async def async_test_powerconfiguration2( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test powerconfiguration/battery sensor.""" await send_attributes_report(hass, cluster, {33: -1}) assert_state(hass, entity_id, STATE_UNKNOWN, "%") @@ -346,13 +290,17 @@ async def 
async_test_powerconfiguration2(hass: HomeAssistant, cluster, entity_id assert_state(hass, entity_id, "49", "%") -async def async_test_device_temperature(hass: HomeAssistant, cluster, entity_id): +async def async_test_device_temperature( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test temperature sensor.""" await send_attributes_report(hass, cluster, {0: 2900}) assert_state(hass, entity_id, "29.0", UnitOfTemperature.CELSIUS) -async def async_test_setpoint_change_source(hass, cluster, entity_id): +async def async_test_setpoint_change_source( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test the translation of numerical state into enum text.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.setpoint_change_source.id: 0x01} @@ -361,7 +309,9 @@ async def async_test_setpoint_change_source(hass, cluster, entity_id): assert hass_state.state == "Schedule" -async def async_test_pi_heating_demand(hass, cluster, entity_id): +async def async_test_pi_heating_demand( + hass: HomeAssistant, cluster: Cluster, entity_id: str +): """Test pi heating demand is correctly returned.""" await send_attributes_report( hass, cluster, {Thermostat.AttributeDefs.pi_heating_demand.id: 1} @@ -568,8 +518,8 @@ async def async_test_pi_heating_demand(hass, cluster, entity_id): ) async def test_sensor( hass: HomeAssistant, + setup_zha, zigpy_device_mock, - zha_device_joined_restored, cluster_id, entity_suffix, test_func, @@ -580,14 +530,18 @@ async def test_sensor( ) -> None: """Test ZHA sensor platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( { 1: { SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, } - } + }, ) cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] if unsupported_attrs: @@ -600,26 +554,27 @@ async def test_sensor( # this one is mains powered zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 cluster.PLUGGED_ATTR_READS = read_plug - zha_device = await zha_device_joined_restored(zigpy_device) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) entity_id = ENTITY_ID_PREFIX.format(entity_suffix) - await async_enable_traffic(hass, [zha_device], enabled=False) - await hass.async_block_till_done() - # ensure the sensor entity was created - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + } + } + ) - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensor now have their correct initial state (mostly unknown) assert hass.states.get(entity_id).state == initial_sensor_state # test sensor associated logic await test_func(hass, cluster, entity_id) - # test rejoin - await async_test_rejoin(hass, zigpy_device, [cluster], (report_count,)) - def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): """Check that the state is what is expected. 
@@ -630,748 +585,3 @@ def assert_state(hass: HomeAssistant, entity_id, state, unit_of_measurement): hass_state = hass.states.get(entity_id) assert hass_state.state == state assert hass_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == unit_of_measurement - - -@pytest.fixture -def hass_ms(hass: HomeAssistant) -> Callable[[str], HomeAssistant]: - """Hass instance with measurement system.""" - - async def _hass_ms(meas_sys: str) -> HomeAssistant: - await config_util.async_process_ha_core_config( - hass, {CONF_UNIT_SYSTEM: meas_sys} - ) - await hass.async_block_till_done() - return hass - - return _hass_ms - - -@pytest.fixture -def core_rs(hass_storage: dict[str, Any]): - """Core.restore_state fixture.""" - - def _storage(entity_id, uom, state): - now = dt_util.utcnow().isoformat() - - hass_storage[restore_state.STORAGE_KEY] = { - "version": restore_state.STORAGE_VERSION, - "key": restore_state.STORAGE_KEY, - "data": [ - { - "state": { - "entity_id": entity_id, - "state": str(state), - "attributes": {ATTR_UNIT_OF_MEASUREMENT: uom}, - "last_changed": now, - "last_updated": now, - "context": { - "id": "3c2243ff5f30447eb12e7348cfd5b8ff", - "user_id": None, - }, - }, - "last_seen": now, - } - ], - } - - return _storage - - -@pytest.mark.parametrize( - ("uom", "raw_temp", "expected", "restore"), - [ - (UnitOfTemperature.CELSIUS, 2900, 29, False), - (UnitOfTemperature.CELSIUS, 2900, 29, True), - (UnitOfTemperature.FAHRENHEIT, 2900, 84, False), - (UnitOfTemperature.FAHRENHEIT, 2900, 84, True), - ], -) -async def test_temp_uom( - hass: HomeAssistant, - uom: UnitOfTemperature, - raw_temp: int, - expected: int, - restore: bool, - hass_ms: Callable[[str], HomeAssistant], - core_rs, - zigpy_device_mock, - zha_device_restored, -) -> None: - """Test ZHA temperature sensor unit of measurement.""" - - entity_id = "sensor.fake1026_fakemodel1026_004f3202_temperature" - if restore: - core_rs(entity_id, uom, state=(expected - 2)) - await async_mock_load_restore_state_from_storage(hass) - - hass = await hass_ms("metric" if uom == UnitOfTemperature.CELSIUS else "imperial") - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - measurement.TemperatureMeasurement.cluster_id, - general.Basic.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = zigpy_device.endpoints[1].temperature - zha_device = await zha_device_restored(zigpy_device) - entity_id = find_entity_id(Platform.SENSOR, zha_device, hass) - - if not restore: - await async_enable_traffic(hass, [zha_device], enabled=False) - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensors now have a state of unknown - if not restore: - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - await send_attribute_report(hass, cluster, 0, raw_temp) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state is not None - assert round(float(state.state)) == expected - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == uom - - -@patch( - "zigpy.zcl.ClusterPersistingListener", - MagicMock(), -) -async def test_electrical_measurement_init( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, -) -> None: - """Test proper initialization of the electrical measurement cluster.""" - - cluster_id = homeautomation.ElectricalMeasurement.cluster_id - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: 
[cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] - zha_device = await zha_device_joined(zigpy_device) - entity_id = "sensor.fakemanufacturer_fakemodel_power" - - # allow traffic to flow through the gateway and devices - await async_enable_traffic(hass, [zha_device]) - - # test that the sensor now have a state of unknown - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - await send_attributes_report(hass, cluster, {0: 1, 1291: 100, 10: 1000}) - assert int(hass.states.get(entity_id).state) == 100 - - cluster_handler = zha_device._endpoints[1].all_cluster_handlers["1:0x0b04"] - assert cluster_handler.ac_power_divisor == 1 - assert cluster_handler.ac_power_multiplier == 1 - - # update power divisor - await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0403: 5, 10: 1000}) - assert cluster_handler.ac_power_divisor == 5 - assert cluster_handler.ac_power_multiplier == 1 - assert hass.states.get(entity_id).state == "4.0" - - await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0605: 10, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 1 - assert hass.states.get(entity_id).state == "3.0" - - # update power multiplier - await send_attributes_report(hass, cluster, {0: 1, 1291: 20, 0x0402: 6, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 6 - assert hass.states.get(entity_id).state == "12.0" - - await send_attributes_report(hass, cluster, {0: 1, 1291: 30, 0x0604: 20, 10: 1000}) - assert cluster_handler.ac_power_divisor == 10 - assert cluster_handler.ac_power_multiplier == 20 - assert hass.states.get(entity_id).state == "60.0" - - -@pytest.mark.parametrize( - ("cluster_id", "unsupported_attributes", "entity_ids", "missing_entity_ids"), - [ - ( - homeautomation.ElectricalMeasurement.cluster_id, - {"apparent_power", "rms_voltage", "rms_current"}, - { - "power", - "ac_frequency", - "power_factor", - }, - { - "apparent_power", - "voltage", - "current", - }, - ), - ( - homeautomation.ElectricalMeasurement.cluster_id, - {"apparent_power", "rms_current", "ac_frequency", "power_factor"}, - {"voltage", "power"}, - { - "apparent_power", - "current", - "ac_frequency", - "power_factor", - }, - ), - ( - homeautomation.ElectricalMeasurement.cluster_id, - set(), - { - "voltage", - "power", - "apparent_power", - "current", - "ac_frequency", - "power_factor", - }, - set(), - ), - ( - smartenergy.Metering.cluster_id, - { - "instantaneous_demand", - }, - { - "summation_delivered", - }, - { - "instantaneous_demand", - }, - ), - ( - smartenergy.Metering.cluster_id, - {"instantaneous_demand", "current_summ_delivered"}, - {}, - { - "instantaneous_demand", - "summation_delivered", - }, - ), - ( - smartenergy.Metering.cluster_id, - {}, - { - "instantaneous_demand", - "summation_delivered", - }, - {}, - ), - ], -) -async def test_unsupported_attributes_sensor( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined_restored, - cluster_id, - unsupported_attributes, - entity_ids, - missing_entity_ids, -) -> None: - """Test ZHA sensor platform.""" - - entity_ids = {ENTITY_ID_PREFIX.format(e) for e in entity_ids} - missing_entity_ids = {ENTITY_ID_PREFIX.format(e) for e in missing_entity_ids} - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [cluster_id, general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: 
zigpy.profiles.zha.DeviceType.ON_OFF_SWITCH, - } - } - ) - cluster = zigpy_device.endpoints[1].in_clusters[cluster_id] - if cluster_id == smartenergy.Metering.cluster_id: - # this one is mains powered - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - for attr in unsupported_attributes: - cluster.add_unsupported_attribute(attr) - zha_device = await zha_device_joined_restored(zigpy_device) - - await async_enable_traffic(hass, [zha_device], enabled=False) - await hass.async_block_till_done() - present_entity_ids = set(find_entity_ids(Platform.SENSOR, zha_device, hass)) - assert present_entity_ids == entity_ids - assert missing_entity_ids not in present_entity_ids - - -@pytest.mark.parametrize( - ("raw_uom", "raw_value", "expected_state", "expected_uom"), - [ - ( - 1, - 12320, - "1.23", - UnitOfVolume.CUBIC_METERS, - ), - ( - 1, - 1232000, - "123.2", - UnitOfVolume.CUBIC_METERS, - ), - ( - 3, - 2340, - "0.65", - UnitOfVolume.CUBIC_METERS, - ), - ( - 3, - 2360, - "0.68", - UnitOfVolume.CUBIC_METERS, - ), - ( - 8, - 23660, - "2.37", - UnitOfPressure.KPA, - ), - ( - 0, - 9366, - "0.937", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 999, - "0.1", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 10091, - "1.009", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 10099, - "1.01", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 100999, - "10.1", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 100023, - "10.002", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 0, - 102456, - "10.246", - UnitOfEnergy.KILO_WATT_HOUR, - ), - ( - 5, - 102456, - "10.25", - "IMP gal", - ), - ( - 7, - 50124, - "5.01", - UnitOfVolume.LITERS, - ), - ], -) -async def test_se_summation_uom( - hass: HomeAssistant, - zigpy_device_mock, - zha_device_joined, - raw_uom, - raw_value, - expected_state, - expected_uom, -) -> None: - """Test ZHA smart energy summation.""" - - entity_id = ENTITY_ID_PREFIX.format("summation_delivered") - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - smartenergy.Metering.cluster_id, - general.Basic.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SIMPLE_SENSOR, - } - } - ) - zigpy_device.node_desc.mac_capability_flags |= 0b_0000_0100 - - cluster = zigpy_device.endpoints[1].in_clusters[smartenergy.Metering.cluster_id] - for attr in ("instanteneous_demand",): - cluster.add_unsupported_attribute(attr) - cluster.PLUGGED_ATTR_READS = { - "current_summ_delivered": raw_value, - "demand_formatting": 0xF9, - "divisor": 10000, - "metering_device_type": 0x00, - "multiplier": 1, - "status": 0x00, - "summation_formatting": 0b1_0111_010, - "unit_of_measure": raw_uom, - } - await zha_device_joined(zigpy_device) - - assert_state(hass, entity_id, expected_state, expected_uom) - - -@pytest.mark.parametrize( - ("raw_measurement_type", "expected_type"), - [ - (1, "ACTIVE_MEASUREMENT"), - (8, "PHASE_A_MEASUREMENT"), - (9, "ACTIVE_MEASUREMENT, PHASE_A_MEASUREMENT"), - ( - 15, - ( - "ACTIVE_MEASUREMENT, REACTIVE_MEASUREMENT, APPARENT_MEASUREMENT," - " PHASE_A_MEASUREMENT" - ), - ), - ], -) -async def test_elec_measurement_sensor_type( - hass: HomeAssistant, - elec_measurement_zigpy_dev, - raw_measurement_type, - expected_type, - zha_device_joined, -) -> None: - """Test ZHA electrical measurement sensor type.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zigpy_dev = elec_measurement_zigpy_dev - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS[ - "measurement_type" - ] = raw_measurement_type - - await zha_device_joined(zigpy_dev) - - state = 
hass.states.get(entity_id) - assert state is not None - assert state.attributes["measurement_type"] == expected_type - - -async def test_elec_measurement_sensor_polling( - hass: HomeAssistant, - elec_measurement_zigpy_dev, - zha_device_joined_restored, -) -> None: - """Test ZHA electrical measurement sensor polling.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zigpy_dev = elec_measurement_zigpy_dev - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( - 20 - ) - - await zha_device_joined_restored(zigpy_dev) - - # test that the sensor has an initial state of 2.0 - state = hass.states.get(entity_id) - assert state.state == "2.0" - - # update the value for the power reading - zigpy_dev.endpoints[1].electrical_measurement.PLUGGED_ATTR_READS["active_power"] = ( - 60 - ) - - # ensure the state is still 2.0 - state = hass.states.get(entity_id) - assert state.state == "2.0" - - # let the polling happen - future = dt_util.utcnow() + timedelta(seconds=90) - async_fire_time_changed(hass, future) - await hass.async_block_till_done(wait_background_tasks=True) - - # ensure the state has been updated to 6.0 - state = hass.states.get(entity_id) - assert state.state == "6.0" - - -@pytest.mark.parametrize( - "supported_attributes", - [ - set(), - { - "active_power", - "active_power_max", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - }, - { - "active_power", - }, - { - "active_power", - "active_power_max", - }, - { - "rms_current", - "rms_current_max", - }, - { - "rms_voltage", - "rms_voltage_max", - }, - ], -) -async def test_elec_measurement_skip_unsupported_attribute( - hass: HomeAssistant, - elec_measurement_zha_dev, - supported_attributes, -) -> None: - """Test ZHA electrical measurement skipping update of unsupported attributes.""" - - entity_id = ENTITY_ID_PREFIX.format("power") - zha_dev = elec_measurement_zha_dev - - cluster = zha_dev.device.endpoints[1].electrical_measurement - - all_attrs = { - "active_power", - "active_power_max", - "apparent_power", - "rms_current", - "rms_current_max", - "rms_voltage", - "rms_voltage_max", - "power_factor", - "ac_frequency", - "ac_frequency_max", - } - for attr in all_attrs - supported_attributes: - cluster.add_unsupported_attribute(attr) - cluster.read_attributes.reset_mock() - - await async_update_entity(hass, entity_id) - await hass.async_block_till_done() - assert cluster.read_attributes.call_count == math.ceil( - len(supported_attributes) / ZHA_CLUSTER_HANDLER_READS_PER_REQ - ) - read_attrs = { - a for call in cluster.read_attributes.call_args_list for a in call[0][0] - } - assert read_attrs == supported_attributes - - -class OppleCluster(CustomCluster, ManufacturerSpecificCluster): - """Aqara manufacturer specific cluster.""" - - cluster_id = 0xFCC0 - ep_attribute = "opple_cluster" - attributes = { - 0x010C: ("last_feeding_size", t.uint16_t, True), - } - - def __init__(self, *args, **kwargs) -> None: - """Initialize.""" - super().__init__(*args, **kwargs) - # populate cache to create config entity - self._attr_cache.update({0x010C: 10}) - - -( - add_to_registry_v2("Fake_Manufacturer_sensor", "Fake_Model_sensor") - .replaces(OppleCluster) - .sensor( - "last_feeding_size", - OppleCluster.cluster_id, - divisor=1, - multiplier=1, - unit=UnitOfMass.GRAMS, - ) -) - - -@pytest.fixture -async def zigpy_device_aqara_sensor_v2( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy Aqara motion sensor device.""" - - zigpy_device = zigpy_device_mock( 
- { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - OppleCluster.cluster_id, - ], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.OCCUPANCY_SENSOR, - } - }, - manufacturer="Fake_Manufacturer_sensor", - model="Fake_Model_sensor", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].opple_cluster - - -async def test_last_feeding_size_sensor_v2( - hass: HomeAssistant, zigpy_device_aqara_sensor_v2 -) -> None: - """Test quirks defined sensor.""" - - zha_device, cluster = zigpy_device_aqara_sensor_v2 - assert isinstance(zha_device.device, CustomDeviceV2) - entity_id = find_entity_id( - Platform.SENSOR, zha_device, hass, qualifier="last_feeding_size" - ) - assert entity_id is not None - - await send_attributes_report(hass, cluster, {0x010C: 1}) - assert_state(hass, entity_id, "1.0", UnitOfMass.GRAMS.value) - - await send_attributes_report(hass, cluster, {0x010C: 5}) - assert_state(hass, entity_id, "5.0", UnitOfMass.GRAMS.value) - - -@pytest.fixture -async def coordinator(hass: HomeAssistant, zigpy_device_mock, zha_device_joined): - """Test ZHA fan platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.CONTROL_BRIDGE, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ieee="00:15:8d:00:02:32:4f:32", - nwk=0x0000, - node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -async def test_device_counter_sensors( - hass: HomeAssistant, - coordinator: ZHADevice, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, -) -> None: - """Test quirks defined sensor.""" - - entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" - state = hass.states.get(entity_id) - assert state is None - - # Enable the entity. 
- entity_registry.async_update_entity(entity_id, disabled_by=None) - await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "1" - - # simulate counter increment on application - coordinator.device.application.state.counters["ezsp_counters"][ - "counter_1" - ].increment() - - next_update = dt_util.utcnow() + timedelta(seconds=60) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "2" - - -@pytest.fixture -async def zigpy_device_danfoss_thermostat( - hass: HomeAssistant, zigpy_device_mock, zha_device_joined_restored -): - """Device tracker zigpy danfoss thermostat device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [ - general.Basic.cluster_id, - general.PowerConfiguration.cluster_id, - general.Identify.cluster_id, - general.Time.cluster_id, - general.PollControl.cluster_id, - Thermostat.cluster_id, - hvac.UserInterface.cluster_id, - homeautomation.Diagnostic.cluster_id, - ], - SIG_EP_OUTPUT: [general.Basic.cluster_id, general.Ota.cluster_id], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.THERMOSTAT, - } - }, - manufacturer="Danfoss", - model="eTRV0100", - ) - - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device - - -async def test_danfoss_thermostat_sw_error( - hass: HomeAssistant, zigpy_device_danfoss_thermostat -) -> None: - """Test quirks defined thermostat.""" - - zha_device, zigpy_device = zigpy_device_danfoss_thermostat - - entity_id = find_entity_id( - Platform.SENSOR, zha_device, hass, qualifier="software_error" - ) - assert entity_id is not None - - cluster = zigpy_device.endpoints[1].diagnostic - - await send_attributes_report( - hass, - cluster, - { - danfoss_thermostat.DanfossDiagnosticCluster.AttributeDefs.sw_error_code.id: 0x0001 - }, - ) - - hass_state = hass.states.get(entity_id) - assert hass_state.state == "something" - assert hass_state.attributes["Top_pcb_sensor_error"] diff --git a/tests/components/zha/test_silabs_multiprotocol.py b/tests/components/zha/test_silabs_multiprotocol.py index 03c845269e0..a5f2db22ce5 100644 --- a/tests/components/zha/test_silabs_multiprotocol.py +++ b/tests/components/zha/test_silabs_multiprotocol.py @@ -11,7 +11,7 @@ import zigpy.state from homeassistant.components import zha from homeassistant.components.zha import silabs_multiprotocol -from homeassistant.components.zha.core.helpers import get_zha_gateway +from homeassistant.components.zha.helpers import get_zha_data from homeassistant.core import HomeAssistant if TYPE_CHECKING: @@ -38,8 +38,7 @@ async def test_async_get_channel_missing( """Test reading channel with an inactive ZHA installation, no valid channel.""" await setup_zha() - gateway = get_zha_gateway(hass) - await zha.async_unload_entry(hass, gateway.config_entry) + await zha.async_unload_entry(hass, get_zha_data(hass).config_entry) # Network settings were never loaded for whatever reason zigpy_app_controller.state.network_info = zigpy.state.NetworkInfo() diff --git a/tests/components/zha/test_siren.py b/tests/components/zha/test_siren.py index 652955ef98d..f9837a7d016 100644 --- a/tests/components/zha/test_siren.py +++ b/tests/components/zha/test_siren.py @@ -4,7 +4,11 @@ from datetime import timedelta from unittest.mock import ANY, call, patch import pytest -from zigpy.const import SIG_EP_PROFILE +from 
zha.application.const import ( + WARNING_DEVICE_MODE_EMERGENCY_PANIC, + WARNING_DEVICE_SOUND_MEDIUM, +) +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE from zigpy.profiles import zha import zigpy.zcl from zigpy.zcl.clusters import general, security @@ -16,16 +20,17 @@ from homeassistant.components.siren import ( ATTR_VOLUME_LEVEL, DOMAIN as SIREN_DOMAIN, ) -from homeassistant.components.zha.core.const import ( - WARNING_DEVICE_MODE_EMERGENCY_PANIC, - WARNING_DEVICE_SOUND_MEDIUM, +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from .common import async_enable_traffic, find_entity_id -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id from tests.common import async_fire_time_changed @@ -46,9 +51,12 @@ def siren_platform_only(): yield -@pytest.fixture -async def siren(hass, zigpy_device_mock, zha_device_joined_restored): - """Siren fixture.""" +async def test_siren(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: + """Test zha siren platform.""" + + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) zigpy_device = zigpy_device_mock( { @@ -58,30 +66,18 @@ async def siren(hass, zigpy_device_mock, zha_device_joined_restored): SIG_EP_TYPE: zha.DeviceType.IAS_WARNING_DEVICE, SIG_EP_PROFILE: zha.PROFILE_ID, } - }, + } ) - zha_device = await zha_device_joined_restored(zigpy_device) - return zha_device, zigpy_device.endpoints[1].ias_wd + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) - -async def test_siren(hass: HomeAssistant, siren) -> None: - """Test zha siren platform.""" - - zha_device, cluster = siren - assert cluster is not None - entity_id = find_entity_id(Platform.SIREN, zha_device, hass) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.SIREN, zha_device_proxy, hass) + cluster = zigpy_device.endpoints[1].ias_wd assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA diff --git a/tests/components/zha/test_switch.py b/tests/components/zha/test_switch.py index c8c2842c400..cc4e41485f9 100644 --- a/tests/components/zha/test_switch.py +++ b/tests/components/zha/test_switch.py @@ -1,51 +1,28 @@ """Test ZHA switch.""" -from unittest.mock import AsyncMock, call, patch +from unittest.mock import call, patch import pytest -from zhaquirks.const import ( - DEVICE_TYPE, - ENDPOINTS, - INPUT_CLUSTERS, - OUTPUT_CLUSTERS, - PROFILE_ID, -) -from zigpy.exceptions import ZigbeeException from zigpy.profiles import zha -from zigpy.quirks import _DEVICE_REGISTRY, CustomCluster, CustomDevice -from 
zigpy.quirks.v2 import CustomDeviceV2, add_to_registry_v2 -import zigpy.types as t -from zigpy.zcl.clusters import closures, general -from zigpy.zcl.clusters.manufacturer_specific import ManufacturerSpecificCluster +from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN -from homeassistant.components.zha.core.group import GroupMember -from homeassistant.components.zha.core.helpers import get_zha_gateway -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.entity_component import async_update_entity from homeassistant.setup import async_setup_component -from .common import ( - async_enable_traffic, - async_find_group_entity_id, - async_test_rejoin, - async_wait_for_updates, - find_entity_id, - send_attributes_report, - update_attribute_cache, -) +from .common import find_entity_id, send_attributes_report from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE -from tests.common import MockConfigEntry - ON = 1 OFF = 0 -IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8" -IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8" @pytest.fixture(autouse=True) @@ -63,104 +40,51 @@ def switch_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -def zigpy_cover_device(zigpy_device_mock): - """Zigpy cover device.""" - - endpoints = { - 1: { - SIG_EP_PROFILE: zha.PROFILE_ID, - SIG_EP_TYPE: zha.DeviceType.WINDOW_COVERING_DEVICE, - SIG_EP_INPUT: [ - general.Basic.cluster_id, - closures.WindowCovering.cluster_id, - ], - SIG_EP_OUTPUT: [], - } - } - return zigpy_device_mock(endpoints) - - -@pytest.fixture -async def device_switch_1(hass, zigpy_device_mock, zha_device_joined): +async def test_switch(hass: HomeAssistant, setup_zha, zigpy_device_mock) -> None: """Test ZHA switch platform.""" + await setup_zha() + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + zigpy_device = zigpy_device_mock( { 1: { - SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], + SIG_EP_INPUT: [ + general.Basic.cluster_id, + general.OnOff.cluster_id, + general.Groups.cluster_id, + ], SIG_EP_OUTPUT: [], SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, } }, - ieee=IEEE_GROUPABLE_DEVICE, + ieee="01:2d:6f:00:0a:90:69:e8", + node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def device_switch_2(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: 
{ - SIG_EP_INPUT: [general.OnOff.cluster_id, general.Groups.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - ieee=IEEE_GROUPABLE_DEVICE2, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zha_device - - -async def test_switch( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device -) -> None: - """Test ZHA switch platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + entity_id = find_entity_id(Platform.SWITCH, zha_device_proxy, hass) cluster = zigpy_device.endpoints[1].on_off - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off assert hass.states.get(entity_id).state == STATE_OFF # turn on at switch - await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 2}) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: ON} + ) assert hass.states.get(entity_id).state == STATE_ON # turn off at switch - await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2}) + await send_attributes_report( + hass, cluster, {general.OnOff.AttributeDefs.on_off.id: OFF} + ) assert hass.states.get(entity_id).state == STATE_OFF # turn on from HA @@ -217,765 +141,3 @@ async def test_switch( assert cluster.read_attributes.call_args == call( ["on_off"], allow_cache=False, only_cache=False, manufacturer=None ) - - # test joining a new switch to the network and HA - await async_test_rejoin(hass, zigpy_device, [cluster], (1,)) - - -class WindowDetectionFunctionQuirk(CustomDevice): - """Quirk with window detection function attribute.""" - - class TuyaManufCluster(CustomCluster, ManufacturerSpecificCluster): - """Tuya manufacturer specific cluster.""" - - cluster_id = 0xEF00 - ep_attribute = "tuya_manufacturer" - - attributes = { - 0xEF01: ("window_detection_function", t.Bool), - 0xEF02: ("window_detection_function_inverter", t.Bool), - } - - def __init__(self, *args, **kwargs): - """Initialize with task.""" - super().__init__(*args, **kwargs) - self._attr_cache.update( - {0xEF01: False} - ) # entity won't be created without this - - replacement = { - ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH, - INPUT_CLUSTERS: [general.Basic.cluster_id, TuyaManufCluster], - OUTPUT_CLUSTERS: [], - }, - } - } - - -@pytest.fixture -async def zigpy_device_tuya(hass, zigpy_device_mock, zha_device_joined): - """Device tracker zigpy tuya device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="_TZE200_b6wax7g0", - quirk=WindowDetectionFunctionQuirk, - ) - - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - await hass.async_block_till_done() - return zigpy_device - - -@patch( - "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY", - new=0, -) -async def 
test_zha_group_switch_entity( - hass: HomeAssistant, - device_switch_1, - device_switch_2, - entity_registry: er.EntityRegistry, - config_entry: MockConfigEntry, -) -> None: - """Test the switch entity for a ZHA group.""" - - # make sure we can still get groups when counter entities exist - entity_id = "sensor.coordinator_manufacturer_coordinator_model_counter_1" - state = hass.states.get(entity_id) - assert state is None - - # Enable the entity. - entity_registry.async_update_entity(entity_id, disabled_by=None) - await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - - state = hass.states.get(entity_id) - assert state is not None - assert state.state == "1" - - zha_gateway = get_zha_gateway(hass) - assert zha_gateway is not None - device_switch_1._zha_gateway = zha_gateway - device_switch_2._zha_gateway = zha_gateway - member_ieee_addresses = [ - device_switch_1.ieee, - device_switch_2.ieee, - zha_gateway.coordinator_zha_device.ieee, - ] - members = [ - GroupMember(device_switch_1.ieee, 1), - GroupMember(device_switch_2.ieee, 1), - GroupMember(zha_gateway.coordinator_zha_device.ieee, 1), - ] - - # test creating a group with 2 members - zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members) - await hass.async_block_till_done() - - assert zha_group is not None - assert len(zha_group.members) == 3 - for member in zha_group.members: - assert member.device.ieee in member_ieee_addresses - assert member.group == zha_group - assert member.endpoint is not None - - entity_id = async_find_group_entity_id(hass, Platform.SWITCH, zha_group) - assert hass.states.get(entity_id) is not None - - group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id] - dev1_cluster_on_off = device_switch_1.device.endpoints[1].on_off - dev2_cluster_on_off = device_switch_2.device.endpoints[1].on_off - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [device_switch_1, device_switch_2]) - await async_wait_for_updates(hass) - - # test that the switches were created and are off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x00, zcl_f.Status.SUCCESS], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.request.mock_calls) == 1 - assert group_cluster_on_off.request.call_args == call( - False, - ON, - group_cluster_on_off.commands_by_name["on"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert hass.states.get(entity_id).state == STATE_ON - - # test turn off failure case - hold_off = group_cluster_on_off.off - group_cluster_on_off.off = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.off.mock_calls) == 1 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - group_cluster_on_off.off = hold_off - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.request", - return_value=[0x01, zcl_f.Status.SUCCESS], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.request.mock_calls) == 1 - assert group_cluster_on_off.request.call_args == call( - False, - OFF, - 
group_cluster_on_off.commands_by_name["off"].schema, - expect_reply=True, - manufacturer=None, - tsn=None, - ) - assert hass.states.get(entity_id).state == STATE_OFF - - # test turn on failure case - hold_on = group_cluster_on_off.on - group_cluster_on_off.on = AsyncMock(return_value=[0x01, zcl_f.Status.FAILURE]) - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert len(group_cluster_on_off.on.mock_calls) == 1 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - group_cluster_on_off.on = hold_on - - # test some of the group logic to make sure we key off states correctly - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await send_attributes_report(hass, dev2_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group switch is on - assert hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group switch is still on - assert hass.states.get(entity_id).state == STATE_ON - - await send_attributes_report(hass, dev2_cluster_on_off, {0: 0}) - await async_wait_for_updates(hass) - - # test that group switch is now off - assert hass.states.get(entity_id).state == STATE_OFF - - await send_attributes_report(hass, dev1_cluster_on_off, {0: 1}) - await async_wait_for_updates(hass) - - # test that group switch is now back on - assert hass.states.get(entity_id).state == STATE_ON - - -async def test_switch_configurable( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_tuya -) -> None: - """Test ZHA configurable switch platform.""" - - zha_device = await zha_device_joined_restored(zigpy_device_tuya) - cluster = zigpy_device_tuya.endpoints[1].tuya_manufacturer - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": True}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": False}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": True}, manufacturer=None) - ] - - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, 
manufacturer=None) - ] - - cluster.read_attributes.reset_mock() - await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - - await hass.services.async_call( - "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True - ) - # the mocking doesn't update the attr cache so this flips back to initial value - assert cluster.read_attributes.call_count == 2 - assert [ - call( - [ - "window_detection_function", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - call( - [ - "window_detection_function_inverter", - ], - allow_cache=False, - only_cache=False, - manufacturer=None, - ), - ] == cluster.read_attributes.call_args_list - - cluster.write_attributes.reset_mock() - cluster.write_attributes.side_effect = ZigbeeException - - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, manufacturer=None), - call({"window_detection_function": False}, manufacturer=None), - call({"window_detection_function": False}, manufacturer=None), - ] - - cluster.write_attributes.side_effect = None - - # test inverter - cluster.write_attributes.reset_mock() - cluster._attr_cache.update({0xEF02: True}) - - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": True}, manufacturer=None) - ] - - cluster.write_attributes.reset_mock() - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": False}, manufacturer=None) - ] - - # test joining a new switch to the network and HA - await async_test_rejoin(hass, zigpy_device_tuya, [cluster], (0,)) - - -async def test_switch_configurable_custom_on_off_values( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer", - model="model", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, - on_value=3, - off_value=5, - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_OFF - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_OFF 
- - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - - -async def test_switch_configurable_custom_on_off_values_force_inverted( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer2", - model="model2", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, - on_value=3, - off_value=5, - force_inverted=True, - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = {"window_detection_function": 5} - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_ON - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_ON - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with 
patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - - -async def test_switch_configurable_custom_on_off_values_inverter_attribute( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device_mock -) -> None: - """Test ZHA configurable switch platform.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - }, - manufacturer="manufacturer3", - model="model3", - ) - - ( - add_to_registry_v2(zigpy_device.manufacturer, zigpy_device.model) - .adds(WindowDetectionFunctionQuirk.TuyaManufCluster) - .switch( - "window_detection_function", - WindowDetectionFunctionQuirk.TuyaManufCluster.cluster_id, - on_value=3, - off_value=5, - invert_attribute_name="window_detection_function_inverter", - ) - ) - - zigpy_device = _DEVICE_REGISTRY.get_device(zigpy_device) - - assert isinstance(zigpy_device, CustomDeviceV2) - cluster = zigpy_device.endpoints[1].tuya_manufacturer - cluster.PLUGGED_ATTR_READS = { - "window_detection_function": 5, - "window_detection_function_inverter": t.Bool(True), - } - update_attribute_cache(cluster) - - zha_device = await zha_device_joined_restored(zigpy_device) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - assert hass.states.get(entity_id).state == STATE_ON - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the switch was created and that its state is unavailable - assert hass.states.get(entity_id).state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - # test that the state has changed from unavailable to off - assert hass.states.get(entity_id).state == STATE_ON - - # turn on at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 3}) - assert hass.states.get(entity_id).state == STATE_OFF - - # turn off at switch - await send_attributes_report(hass, cluster, {"window_detection_function": 5}) - assert hass.states.get(entity_id).state == STATE_ON - - # turn on from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn on via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 5}, manufacturer=None) - ] - cluster.write_attributes.reset_mock() - - # turn off from HA - with patch( - "zigpy.zcl.Cluster.write_attributes", - return_value=[zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]], - ): - # turn off via UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.mock_calls == [ - call({"window_detection_function": 3}, manufacturer=None) - ] - - -WCAttrs = closures.WindowCovering.AttributeDefs -WCT = closures.WindowCovering.WindowCoveringType -WCCS = closures.WindowCovering.ConfigStatus -WCM = closures.WindowCovering.WindowCoveringMode - - -async def test_cover_inversion_switch( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) 
-> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 65, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.window_covering_type.name: WCT.Tilt_blind_tilt_and_lift, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - WCAttrs.window_covering_mode.name: WCM(WCM.LEDs_display_feedback), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - assert ( - not zha_device.endpoints[1] - .all_cluster_handlers[f"1:0x{cluster.cluster_id:04x}"] - .inverted - ) - assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is not None - - await async_enable_traffic(hass, [zha_device], enabled=False) - # test that the cover was created and that it is unavailable - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_UNAVAILABLE - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - await hass.async_block_till_done() - - # test update - prev_call_count = cluster.read_attributes.call_count - await async_update_entity(hass, entity_id) - assert cluster.read_attributes.call_count == prev_call_count + 1 - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - # test to see the state remains after tilting to 0% - await send_attributes_report( - hass, cluster, {WCAttrs.current_position_tilt_percentage.id: 0} - ) - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - with patch( - "zigpy.zcl.Cluster.write_attributes", return_value=[0x1, zcl_f.Status.SUCCESS] - ): - cluster.PLUGGED_ATTR_READS = { - WCAttrs.config_status.name: WCCS.Operational - | WCCS.Open_up_commands_reversed, - } - # turn on from UI - await hass.services.async_call( - SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 1 - assert cluster.write_attributes.call_args_list[0] == call( - { - WCAttrs.window_covering_mode.name: WCM.Motor_direction_reversed - | WCM.LEDs_display_feedback - }, - manufacturer=None, - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_ON - - cluster.write_attributes.reset_mock() - - # turn off from UI - cluster.PLUGGED_ATTR_READS = { - WCAttrs.config_status.name: WCCS.Operational, - } - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 1 - assert cluster.write_attributes.call_args_list[0] == call( - {WCAttrs.window_covering_mode.name: WCM.LEDs_display_feedback}, - manufacturer=None, - ) - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - cluster.write_attributes.reset_mock() - - # test that sending the command again does not result in a write - await hass.services.async_call( - SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True - ) - assert cluster.write_attributes.call_count == 0 - - state = hass.states.get(entity_id) - assert state - assert state.state == STATE_OFF - - -async def 
test_cover_inversion_switch_not_created( - hass: HomeAssistant, zha_device_joined_restored, zigpy_cover_device -) -> None: - """Test ZHA cover platform.""" - - # load up cover domain - cluster = zigpy_cover_device.endpoints[1].window_covering - cluster.PLUGGED_ATTR_READS = { - WCAttrs.current_position_lift_percentage.name: 65, - WCAttrs.current_position_tilt_percentage.name: 42, - WCAttrs.config_status.name: WCCS(~WCCS.Open_up_commands_reversed), - } - update_attribute_cache(cluster) - zha_device = await zha_device_joined_restored(zigpy_cover_device) - - assert cluster.read_attributes.call_count == 3 - assert ( - WCAttrs.current_position_lift_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - assert ( - WCAttrs.current_position_tilt_percentage.name - in cluster.read_attributes.call_args[0][0] - ) - - # entity should not be created when mode or config status aren't present - entity_id = find_entity_id(Platform.SWITCH, zha_device, hass) - assert entity_id is None diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 32be013e673..6a1a19b407f 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -23,13 +23,25 @@ from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) -from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .common import async_enable_traffic, find_entity_id, update_attribute_cache -from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE +from .common import find_entity_id, update_attribute_cache +from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE @pytest.fixture(autouse=True) @@ -47,28 +59,32 @@ def update_platform_only(): yield -@pytest.fixture -def zigpy_device(zigpy_device_mock): - """Device tracker zigpy device.""" - endpoints = { - 1: { - SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], - SIG_EP_OUTPUT: [general.Ota.cluster_id], - SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, - } - } - return zigpy_device_mock( - endpoints, node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00" - ) - - async def setup_test_data( - zha_device_joined_restored, - zigpy_device, + hass: HomeAssistant, + zigpy_device_mock, skip_attribute_plugs=False, file_not_found=False, ): """Set up test data for the tests.""" + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", + ) + + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + fw_version = 0x12345678 installed_fw_version = fw_version - 10 cluster = zigpy_device.endpoints[1].out_clusters[general.Ota.cluster_id] @@ -106,31 +122,28 @@ async def setup_test_data( 
cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( return_value=None if file_not_found else fw_image ) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) - zha_device = await zha_device_joined_restored(zigpy_device) - zha_device.async_update_sw_build_id(installed_fw_version) - - return zha_device, cluster, fw_image, installed_fw_version + return zha_device_proxy, cluster, fw_image, installed_fw_version async def test_firmware_update_notification_from_zigpy( hass: HomeAssistant, - zha_device_joined_restored, - zigpy_device, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update notification.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, - zigpy_device, + hass, + zigpy_device_mock, ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -139,7 +152,7 @@ async def test_firmware_update_notification_from_zigpy( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -158,20 +171,20 @@ async def test_firmware_update_notification_from_zigpy( async def test_firmware_update_notification_from_service_call( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update manual check.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, + zigpy_device_mock, ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN async def _async_image_notify_side_effect(*args, **kwargs): await cluster._handle_query_next_image( @@ -180,7 +193,7 @@ async def test_firmware_update_notification_from_service_call( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -245,11 +258,14 @@ def make_packet(zigpy_device, cluster, cmd_name: str, **kwargs): @patch("zigpy.device.AFTER_OTA_ATTR_READ_DELAY", 0.01) async def test_firmware_update_success( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update success.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) assert installed_fw_version < fw_image.firmware.header.file_version @@ -257,10 +273,7 @@ async def 
test_firmware_update_success( entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -269,7 +282,7 @@ async def test_firmware_update_success( ), general.QueryNextImageCommand( field_control=fw_image.firmware.header.field_control, - manufacturer_code=zha_device.manufacturer_code, + manufacturer_code=zha_device.device.manufacturer_code, image_type=fw_image.firmware.header.image_type, current_file_version=installed_fw_version, ), @@ -289,9 +302,9 @@ async def test_firmware_update_success( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, @@ -309,9 +322,9 @@ async def test_firmware_update_success( assert cmd.image_type == fw_image.firmware.header.image_type assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.image_size == fw_image.firmware.header.image_size - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, @@ -320,7 +333,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=0, maximum_data_size=40, - request_node_addr=zigpy_device.ieee, + request_node_addr=zha_device.device.device.ieee, ) ) elif isinstance( @@ -336,9 +349,9 @@ async def test_firmware_update_success( assert cmd.file_version == fw_image.firmware.header.file_version assert cmd.file_offset == 0 assert cmd.image_data == fw_image.firmware.serialize()[0:40] - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.image_block.name, field_control=general.Ota.ImageBlockCommand.FieldControl.RequestNodeAddr, @@ -347,7 +360,7 @@ async def test_firmware_update_success( file_version=fw_image.firmware.header.file_version, file_offset=40, maximum_data_size=40, - request_node_addr=zigpy_device.ieee, + request_node_addr=zha_device.device.device.ieee, ) ) elif cmd.file_offset == 40: @@ -374,9 +387,9 @@ async def test_firmware_update_success( == f"0x{fw_image.firmware.header.file_version:08x}" ) - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.upgrade_end.name, status=foundation.Status.SUCCESS, @@ -430,7 +443,7 @@ async def test_firmware_update_success( # If we send a progress notification incorrectly, it won't be handled entity = hass.data[UPDATE_DOMAIN].get_entity(entity_id) - entity._update_progress(50, 100, 0.50) + entity.entity_data.entity._update_progress(50, 100, 0.50) state = hass.states.get(entity_id) assert not attrs[ATTR_IN_PROGRESS] @@ -438,20 +451,20 @@ async def test_firmware_update_success( async def test_firmware_update_raises( 
- hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update raises.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -460,7 +473,7 @@ async def test_firmware_update_raises( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -481,9 +494,9 @@ async def test_firmware_update_raises( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, @@ -532,20 +545,20 @@ async def test_firmware_update_raises( async def test_firmware_update_no_longer_compatible( - hass: HomeAssistant, zha_device_joined_restored, zigpy_device + hass: HomeAssistant, + setup_zha, + zigpy_device_mock, ) -> None: """Test ZHA update platform - firmware update is no longer valid.""" + await setup_zha() zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - zha_device_joined_restored, zigpy_device + hass, zigpy_device_mock ) entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - # allow traffic to flow through the gateway and device - await async_enable_traffic(hass, [zha_device]) - - assert hass.states.get(entity_id).state == STATE_OFF + assert hass.states.get(entity_id).state == STATE_UNKNOWN # simulate an image available notification await cluster._handle_query_next_image( @@ -554,7 +567,7 @@ async def test_firmware_update_no_longer_compatible( ), general.QueryNextImageCommand( fw_image.firmware.header.field_control, - zha_device.manufacturer_code, + zha_device.device.manufacturer_code, fw_image.firmware.header.image_type, installed_fw_version, fw_image.firmware.header.header_version, @@ -577,9 +590,9 @@ async def test_firmware_update_no_longer_compatible( if cluster_id == general.Ota.cluster_id: hdr, cmd = cluster.deserialize(data) if isinstance(cmd, general.Ota.ImageNotifyCommand): - zigpy_device.packet_received( + zha_device.device.device.packet_received( make_packet( - zigpy_device, + zha_device.device.device, cluster, general.Ota.ServerCommandDefs.query_next_image.name, field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, diff --git a/tests/components/zha/test_websocket_api.py b/tests/components/zha/test_websocket_api.py index 80b9f6accd0..f6afee9eb83 100644 --- a/tests/components/zha/test_websocket_api.py +++ b/tests/components/zha/test_websocket_api.py @@ -10,12 +10,27 @@ from unittest.mock import ANY, AsyncMock, MagicMock, call, patch from freezegun import freeze_time import 
pytest import voluptuous as vol +from zha.application.const import ( + ATTR_CLUSTER_ID, + ATTR_CLUSTER_TYPE, + ATTR_ENDPOINT_ID, + ATTR_ENDPOINT_NAMES, + ATTR_IEEE, + ATTR_MANUFACTURER, + ATTR_NEIGHBORS, + ATTR_QUIRK_APPLIED, + ATTR_TYPE, + CLUSTER_TYPE_IN, +) +from zha.zigbee.cluster_handlers import ClusterBindEvent, ClusterConfigureReportingEvent +from zha.zigbee.device import ClusterHandlerConfigurationComplete import zigpy.backups +from zigpy.const import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE import zigpy.profiles.zha import zigpy.types from zigpy.types.named import EUI64 import zigpy.util -from zigpy.zcl.clusters import general, security +from zigpy.zcl.clusters import closures, general, security from zigpy.zcl.clusters.general import Groups import zigpy.zdo.types as zdo_types @@ -25,23 +40,12 @@ from homeassistant.components.websocket_api import ( TYPE_RESULT, ) from homeassistant.components.zha import DOMAIN -from homeassistant.components.zha.core.const import ( - ATTR_CLUSTER_ID, - ATTR_CLUSTER_TYPE, - ATTR_ENDPOINT_ID, - ATTR_ENDPOINT_NAMES, - ATTR_IEEE, - ATTR_MANUFACTURER, - ATTR_MODEL, - ATTR_NEIGHBORS, - ATTR_QUIRK_APPLIED, - ATTR_TYPE, - BINDINGS, - CLUSTER_TYPE_IN, - EZSP_OVERWRITE_EUI64, - GROUP_ID, - GROUP_IDS, - GROUP_NAME, +from homeassistant.components.zha.const import EZSP_OVERWRITE_EUI64 +from homeassistant.components.zha.helpers import ( + ZHADeviceProxy, + ZHAGatewayProxy, + get_zha_gateway, + get_zha_gateway_proxy, ) from homeassistant.components.zha.websocket_api import ( ATTR_DURATION, @@ -49,22 +53,19 @@ from homeassistant.components.zha.websocket_api import ( ATTR_QR_CODE, ATTR_SOURCE_IEEE, ATTR_TARGET_IEEE, + BINDINGS, + GROUP_ID, + GROUP_IDS, + GROUP_NAME, ID, SERVICE_PERMIT, TYPE, async_load_api, ) -from homeassistant.const import ATTR_NAME, Platform +from homeassistant.const import ATTR_MODEL, ATTR_NAME, Platform from homeassistant.core import Context, HomeAssistant -from .conftest import ( - FIXTURE_GRP_ID, - FIXTURE_GRP_NAME, - SIG_EP_INPUT, - SIG_EP_OUTPUT, - SIG_EP_PROFILE, - SIG_EP_TYPE, -) +from .conftest import FIXTURE_GRP_ID, FIXTURE_GRP_NAME from .data import BASE_CUSTOM_CONFIGURATION, CONFIG_WITH_ALARM_OPTIONS from tests.common import MockConfigEntry, MockUser @@ -93,10 +94,18 @@ def required_platform_only(): @pytest.fixture -async def device_switch(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA switch platform.""" +async def zha_client( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup_zha, + zigpy_device_mock, +) -> MockHAClientWebSocket: + """Get ZHA WebSocket client.""" - zigpy_device = zigpy_device_mock( + await setup_zha() + gateway = get_zha_gateway(hass) + + zigpy_device_switch = zigpy_device_mock( { 1: { SIG_EP_INPUT: [general.OnOff.cluster_id, general.Basic.cluster_id], @@ -107,35 +116,8 @@ async def device_switch(hass, zigpy_device_mock, zha_device_joined): }, ieee=IEEE_SWITCH_DEVICE, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - -@pytest.fixture -async def device_ias_ace(hass, zigpy_device_mock, zha_device_joined): - """Test alarm control panel device.""" - - zigpy_device = zigpy_device_mock( - { - 1: { - SIG_EP_INPUT: [security.IasAce.cluster_id], - SIG_EP_OUTPUT: [], - SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, - SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, - } - }, - ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device - - -@pytest.fixture -async 
def device_groupable(hass, zigpy_device_mock, zha_device_joined): - """Test ZHA light platform.""" - - zigpy_device = zigpy_device_mock( + zigpy_device_groupable = zigpy_device_mock( { 1: { SIG_EP_INPUT: [ @@ -150,19 +132,14 @@ async def device_groupable(hass, zigpy_device_mock, zha_device_joined): }, ieee=IEEE_GROUPABLE_DEVICE, ) - zha_device = await zha_device_joined(zigpy_device) - zha_device.available = True - return zha_device + gateway.get_or_create_device(zigpy_device_switch) + await gateway.async_device_initialized(zigpy_device_switch) + await hass.async_block_till_done(wait_background_tasks=True) -@pytest.fixture -async def zha_client( - hass: HomeAssistant, - hass_ws_client: WebSocketGenerator, - device_switch, - device_groupable, -) -> MockHAClientWebSocket: - """Get ZHA WebSocket client.""" + gateway.get_or_create_device(zigpy_device_groupable) + await gateway.async_device_initialized(zigpy_device_groupable) + await hass.async_block_till_done(wait_background_tasks=True) # load the ZHA API async_load_api(hass) @@ -247,7 +224,7 @@ async def test_list_devices(zha_client) -> None: msg = await zha_client.receive_json() devices = msg["result"] - assert len(devices) == 2 + 1 # the coordinator is included as well + assert len(devices) == 3 # the coordinator is included as well msg_id = 100 for device in devices: @@ -284,9 +261,31 @@ async def test_get_zha_config(zha_client) -> None: async def test_get_zha_config_with_alarm( - hass: HomeAssistant, zha_client, device_ias_ace + hass: HomeAssistant, zha_client, zigpy_device_mock ) -> None: """Test getting ZHA custom configuration.""" + + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device_ias = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [security.IasAce.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + + gateway.get_or_create_device(zigpy_device_ias) + await gateway.async_device_initialized(zigpy_device_ias) + await hass.async_block_till_done(wait_background_tasks=True) + zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy( + zigpy_device_ias.ieee + ) + await zha_client.send_json({ID: 5, TYPE: "zha/configuration"}) msg = await zha_client.receive_json() @@ -295,7 +294,7 @@ async def test_get_zha_config_with_alarm( assert configuration == CONFIG_WITH_ALARM_OPTIONS # test that the alarm options are not in the config when we remove the device - device_ias_ace.gateway.device_removed(device_ias_ace.device) + zha_device_proxy.gateway_proxy.gateway.device_removed(zha_device_proxy.device) await hass.async_block_till_done() await zha_client.send_json({ID: 6, TYPE: "zha/configuration"}) @@ -390,11 +389,12 @@ async def test_get_group_not_found(zha_client) -> None: async def test_list_groupable_devices( - zha_client, device_groupable, zigpy_app_controller + hass: HomeAssistant, zha_client, zigpy_app_controller ) -> None: """Test getting ZHA devices that have a group cluster.""" # Ensure the coordinator doesn't have a group cluster coordinator = zigpy_app_controller.get_device(nwk=0x0000) + del coordinator.endpoints[1].in_clusters[Groups.cluster_id] await zha_client.send_json({ID: 10, TYPE: "zha/devices/groupable"}) @@ -425,7 +425,10 @@ async def test_list_groupable_devices( # Make sure there are no groupable devices when the device is unavailable # Make device unavailable - device_groupable.available = False + get_zha_gateway_proxy(hass).device_proxies[ + 
EUI64.convert(IEEE_GROUPABLE_DEVICE) + ].device.available = False + await hass.async_block_till_done(wait_background_tasks=True) await zha_client.send_json({ID: 11, TYPE: "zha/devices/groupable"}) @@ -437,9 +440,16 @@ async def test_list_groupable_devices( assert len(device_endpoints) == 0 -async def test_add_group(zha_client) -> None: +async def test_add_group(hass: HomeAssistant, zha_client) -> None: """Test adding and getting a new ZHA zigbee group.""" - await zha_client.send_json({ID: 12, TYPE: "zha/group/add", GROUP_NAME: "new_group"}) + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) msg = await zha_client.receive_json() assert msg["id"] == 12 @@ -447,8 +457,17 @@ async def test_add_group(zha_client) -> None: added_group = msg["result"] + groupable_device = get_zha_gateway_proxy(hass).device_proxies[ + EUI64.convert(IEEE_GROUPABLE_DEVICE) + ] + assert added_group["name"] == "new_group" - assert added_group["members"] == [] + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + assert ( + added_group["members"][0]["device"]["device_reg_id"] + == groupable_device.device_id + ) await zha_client.send_json({ID: 13, TYPE: "zha/groups"}) @@ -496,6 +515,82 @@ async def test_remove_group(zha_client) -> None: assert len(groups) == 0 +async def test_add_group_member(hass: HomeAssistant, zha_client) -> None: + """Test adding a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 0 + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/add", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert len(added_group["members"]) == 1 + assert added_group["name"] == "new_group" + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + +async def test_remove_group_member(hass: HomeAssistant, zha_client) -> None: + """Test removing a ZHA zigbee group member.""" + await zha_client.send_json( + { + ID: 12, + TYPE: "zha/group/add", + GROUP_NAME: "new_group", + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 12 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + + assert added_group["name"] == "new_group" + assert len(added_group["members"]) == 1 + assert added_group["members"][0]["device"]["ieee"] == IEEE_GROUPABLE_DEVICE + + await zha_client.send_json( + { + ID: 13, + TYPE: "zha/group/members/remove", + GROUP_ID: added_group["group_id"], + "members": [{"ieee": IEEE_GROUPABLE_DEVICE, "endpoint_id": 1}], + } + ) + + msg = await zha_client.receive_json() + assert msg["id"] == 13 + assert msg["type"] == TYPE_RESULT + + added_group = msg["result"] + assert len(added_group["members"]) == 0 + + @pytest.fixture async def app_controller( hass: HomeAssistant, setup_zha, zigpy_app_controller: ControllerApplication @@ -1037,3 +1132,101 @@ async def test_websocket_bind_unbind_group( assert bind_mock.mock_calls == [call(test_group_id, ANY)] 
elif command_type == "unbind": assert unbind_mock.mock_calls == [call(test_group_id, ANY)] + + +async def test_websocket_reconfigure( + hass: HomeAssistant, zha_client: MockHAClientWebSocket, zigpy_device_mock +) -> None: + """Test websocket API to reconfigure a device.""" + gateway = get_zha_gateway(hass) + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [closures.WindowCovering.cluster_id], + SIG_EP_OUTPUT: [], + SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.SHADE, + SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID, + } + }, + ) + + zha_device = gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device_proxy = get_zha_gateway_proxy(hass).get_device_proxy(zha_device.ieee) + + def mock_reconfigure() -> None: + zha_device_proxy.handle_zha_channel_configure_reporting( + ClusterConfigureReportingEvent( + cluster_name="Window Covering", + cluster_id=258, + attributes={ + "current_position_lift_percentage": { + "min": 0, + "max": 900, + "id": "current_position_lift_percentage", + "name": "current_position_lift_percentage", + "change": 1, + "status": "SUCCESS", + }, + "current_position_tilt_percentage": { + "min": 0, + "max": 900, + "id": "current_position_tilt_percentage", + "name": "current_position_tilt_percentage", + "change": 1, + "status": "SUCCESS", + }, + }, + cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0102", + event_type="zha_channel_message", + event="zha_channel_configure_reporting", + ) + ) + + zha_device_proxy.handle_zha_channel_bind( + ClusterBindEvent( + cluster_name="Window Covering", + cluster_id=1, + success=True, + cluster_handler_unique_id="28:2c:02:bf:ff:ea:05:68:1:0x0012", + event_type="zha_channel_message", + event="zha_channel_bind", + ) + ) + + zha_device_proxy.handle_zha_channel_cfg_done( + ClusterHandlerConfigurationComplete( + device_ieee="28:2c:02:bf:ff:ea:05:68", + unique_id="28:2c:02:bf:ff:ea:05:68", + event_type="zha_channel_message", + event="zha_channel_cfg_done", + ) + ) + + with patch.object( + zha_device_proxy.device, "async_configure", side_effect=mock_reconfigure + ): + await zha_client.send_json( + { + ID: 6, + TYPE: "zha/devices/reconfigure", + ATTR_IEEE: str(zha_device_proxy.device.ieee), + } + ) + + messages = [] + + while len(messages) != 3: + msg = await zha_client.receive_json() + + if msg[ID] == 6: + messages.append(msg) + + # Ensure the frontend receives progress events + assert {m["event"]["type"] for m in messages} == { + "zha_channel_configure_reporting", + "zha_channel_bind", + "zha_channel_cfg_done", + } diff --git a/tests/components/zha/zha_devices_list.py b/tests/components/zha/zha_devices_list.py deleted file mode 100644 index 4c23244c5e0..00000000000 --- a/tests/components/zha/zha_devices_list.py +++ /dev/null @@ -1,5922 +0,0 @@ -"""Example Zigbee Devices.""" - -from zigpy.const import ( - SIG_ENDPOINTS, - SIG_EP_INPUT, - SIG_EP_OUTPUT, - SIG_EP_PROFILE, - SIG_EP_TYPE, - SIG_MANUFACTURER, - SIG_MODEL, - SIG_NODE_DESC, -) -from zigpy.profiles import zha, zll -from zigpy.types import Bool, uint8_t -from zigpy.zcl.clusters.closures import DoorLock -from zigpy.zcl.clusters.general import ( - Basic, - Groups, - Identify, - LevelControl, - MultistateInput, - OnOff, - Ota, - PowerConfiguration, - Scenes, -) -from zigpy.zcl.clusters.lighting import Color -from zigpy.zcl.clusters.measurement import ( - IlluminanceMeasurement, - OccupancySensing, - TemperatureMeasurement, -) - -DEV_SIG_CLUSTER_HANDLERS = 
"cluster_handlers" -DEV_SIG_DEV_NO = "device_no" -DEV_SIG_ENT_MAP = "entity_map" -DEV_SIG_ENT_MAP_CLASS = "entity_class" -DEV_SIG_ENT_MAP_ID = "entity_id" -DEV_SIG_EP_ID = "endpoint_id" -DEV_SIG_EVT_CLUSTER_HANDLERS = "event_cluster_handlers" -DEV_SIG_ZHA_QUIRK = "zha_quirk" -DEV_SIG_ATTRIBUTES = "attributes" - - -PROFILE_ID = SIG_EP_PROFILE -DEVICE_TYPE = SIG_EP_TYPE -INPUT_CLUSTERS = SIG_EP_INPUT -OUTPUT_CLUSTERS = SIG_EP_OUTPUT - -DEVICES = [ - { - DEV_SIG_DEV_NO: 0, - SIG_MANUFACTURER: "ADUROLIGHT", - SIG_MODEL: "Adurolight_NCC", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00*d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2080, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4096, 64716], - SIG_EP_OUTPUT: [3, 4, 6, 8, 4096, 64716], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.adurolight_adurolight_ncc_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.adurolight_adurolight_ncc_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 1, - SIG_MANUFACTURER: "Bosch", - SIG_MODEL: "ISW-ZPR1-WP13", - SIG_NODE_DESC: b"\x02@\x08\x00\x00l\x00\x00\x00\x00\x00\x00\x00", - SIG_ENDPOINTS: { - 5: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["5:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-5-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.bosch_isw_zpr1_wp13_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-5-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.bosch_isw_zpr1_wp13_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-5-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.bosch_isw_zpr1_wp13_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-5-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.bosch_isw_zpr1_wp13_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 2, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3130", - SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 2821], - SIG_EP_OUTPUT: [3, 6, 8, 25], - 
SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3130_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3130_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3130_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 3, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3210-L", - SIG_NODE_DESC: b"\x01@\x8eN\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2820, 2821, 64515], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.centralite_3210_l_switch", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3210_l_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_instantaneous_demand", - }, - 
("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3210_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3210_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 4, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3310-S", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 770, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 2821, 64581], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3310_s_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-64581"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3310_s_humidity", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3310_s_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 5, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3315-S", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64527], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3315_s_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3315_s_identify", - }, - 
("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3315_s_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3315_s_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 6, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3320-L", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64527], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3320_l_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3320_l_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3320_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3320_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 7, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "3326-L", - SIG_NODE_DESC: b"\x02@\x80\xdf\xc2RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821, 64582], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 49887, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: 
"IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_3326_l_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_3326_l_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_3326_l_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_3326_l_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 8, - SIG_MANUFACTURER: "CentraLite", - SIG_MODEL: "Motion Sensor-A", - SIG_NODE_DESC: b"\x02@\x80N\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 1030, 2821], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.centralite_motion_sensor_a_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.centralite_motion_sensor_a_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.centralite_motion_sensor_a_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], - DEV_SIG_ENT_MAP_CLASS: "Occupancy", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.centralite_motion_sensor_a_occupancy" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.centralite_motion_sensor_a_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 9, - SIG_MANUFACTURER: "ClimaxTechnology", - 
SIG_MODEL: "PSMP5_00.00.02.02TC", - SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 9, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["4:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: ( - "switch.climaxtechnology_psmp5_00_00_02_02tc_switch" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_psmp5_00_00_02_02tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: ( - "sensor.climaxtechnology_psmp5_00_00_02_02tc_instantaneous_demand" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: ( - "sensor.climaxtechnology_psmp5_00_00_02_02tc_summation_delivered" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_psmp5_00_00_02_02tc_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-4-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.climaxtechnology_psmp5_00_00_02_02tc_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 10, - SIG_MANUFACTURER: "ClimaxTechnology", - SIG_MODEL: "SD8SC_00.00.03.12TC", - SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 1280, 1282], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.climaxtechnology_sd8sc_00_00_03_12tc_ias_zone" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_sd8sc_00_00_03_12tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_sd8sc_00_00_03_12tc_lqi", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-WarningMode"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - 
"select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_tone" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_siren_level" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe_level" - ), - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", - DEV_SIG_ENT_MAP_ID: ( - "select.climaxtechnology_sd8sc_00_00_03_12tc_default_strobe" - ), - }, - ("siren", "00:11:22:33:44:55:66:77-1-1282"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHASiren", - DEV_SIG_ENT_MAP_ID: "siren.climaxtechnology_sd8sc_00_00_03_12tc_siren", - }, - }, - }, - { - DEV_SIG_DEV_NO: 11, - SIG_MANUFACTURER: "ClimaxTechnology", - SIG_MODEL: "WS15_00.00.03.03TC", - SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 1280], - SIG_EP_OUTPUT: [0], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.climaxtechnology_ws15_00_00_03_03tc_ias_zone" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.climaxtechnology_ws15_00_00_03_03tc_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.climaxtechnology_ws15_00_00_03_03tc_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 12, - SIG_MANUFACTURER: "Feibit Inc co.", - SIG_MODEL: "FB56-ZCW08KU1.1", - SIG_NODE_DESC: b"\x01@\x8e\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 11: { - SIG_EP_TYPE: 528, - DEV_SIG_EP_ID: 11, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49246, - }, - 13: { - SIG_EP_TYPE: 57694, - DEV_SIG_EP_ID: 13, - SIG_EP_INPUT: [4096], - SIG_EP_OUTPUT: [4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-11"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.feibit_inc_co_fb56_zcw08ku1_1_light", - }, - ("button", "00:11:22:33:44:55:66:77-11-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.feibit_inc_co_fb56_zcw08ku1_1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-11-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-11-0-lqi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.feibit_inc_co_fb56_zcw08ku1_1_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 13, - SIG_MANUFACTURER: "HEIMAN", - SIG_MODEL: "SmokeSensor-EM", - SIG_NODE_DESC: b"\x02@\x80\x0b\x12RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1280, 1282], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_smokesensor_em_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_smokesensor_em_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_smokesensor_em_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.heiman_smokesensor_em_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 14, - SIG_MANUFACTURER: "Heiman", - SIG_MODEL: "CO_V16", - SIG_NODE_DESC: b"\x02@\x84\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_co_v16_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_co_v16_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_co_v16_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.heiman_co_v16_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 15, - SIG_MANUFACTURER: "Heiman", - SIG_MODEL: "WarningDevice", - SIG_NODE_DESC: b"\x01@\x8e\x0b\x12RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1027, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 9, 1280, 1282], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("select", 
"00:11:22:33:44:55:66:77-1-1282-WarningMode"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultToneSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_tone", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-SirenLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultSirenLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_siren_level", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-StrobeLevel"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeLevelSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe_level", - }, - ("select", "00:11:22:33:44:55:66:77-1-1282-Strobe"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHADefaultStrobeSelectEntity", - DEV_SIG_ENT_MAP_ID: "select.heiman_warningdevice_default_strobe", - }, - ("siren", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_wd"], - DEV_SIG_ENT_MAP_CLASS: "ZHASiren", - DEV_SIG_ENT_MAP_ID: "siren.heiman_warningdevice_siren", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.heiman_warningdevice_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.heiman_warningdevice_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.heiman_warningdevice_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.heiman_warningdevice_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 16, - SIG_MANUFACTURER: "HiveHome.com", - SIG_MODEL: "MOT003", - SIG_NODE_DESC: b"\x02@\x809\x10PP\x00\x00\x00P\x00\x00", - SIG_ENDPOINTS: { - 6: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["6:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-6-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.hivehome_com_mot003_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-6-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.hivehome_com_mot003_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_temperature", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-6-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-6-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.hivehome_com_mot003_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-6-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.hivehome_com_mot003_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 17, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E12 WS opal 600lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 268, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 4096, 64636], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 97, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [33], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e12_ws_opal_600lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 18, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 CWS opal 600lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 512, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: 
["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_cws_opal_600lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 19, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 W opal 1000lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_w_opal_1000lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 20, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 WS opal 980lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 544, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.ikea_of_sweden_tradfri_bulb_e26_ws_opal_980lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 21, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI bulb E26 opal 1000lm", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 2821, 4096], - SIG_EP_OUTPUT: [5, 25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: ( - "light.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_light" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_lqi" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_bulb_e26_opal_1000lm_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 22, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI control outlet", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 266, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 64636], - SIG_EP_OUTPUT: [5, 25, 32], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: ( - "switch.ikea_of_sweden_tradfri_control_outlet_switch" - ), - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_control_outlet_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_control_outlet_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_control_outlet_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 23, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI motion sensor", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2128, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 6, 25, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: 
["1:0x0006", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_motion_sensor_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_motion_sensor_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_motion_sensor_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Motion", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.ikea_of_sweden_tradfri_motion_sensor_motion" - ), - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_motion_sensor_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 24, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI on/off switch", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2080, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 32, 4096, 64636], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 258, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0102"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_on_off_switch_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_on_off_switch_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_on_off_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_on_off_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_on_off_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 25, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI remote control", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2096, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 4096], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - 
"button.ikea_of_sweden_tradfri_remote_control_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_remote_control_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_remote_control_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_remote_control_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 26, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI signal repeater", - SIG_NODE_DESC: b"\x01@\x8e|\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 8, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 9, 2821, 4096, 64636], - SIG_EP_OUTPUT: [25, 32, 4096], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 97, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [33], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_signal_repeater_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_signal_repeater_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_signal_repeater_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_signal_repeater_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 27, - SIG_MANUFACTURER: "IKEA of Sweden", - SIG_MODEL: "TRADFRI wireless dimmer", - SIG_NODE_DESC: b"\x02@\x80|\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 9, 2821, 4096], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.ikea_of_sweden_tradfri_wireless_dimmer_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_wireless_dimmer_battery" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.ikea_of_sweden_tradfri_wireless_dimmer_rssi" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: 
["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ikea_of_sweden_tradfri_wireless_dimmer_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ikea_of_sweden_tradfri_wireless_dimmer_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 28, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45852", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 260, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6, 8], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45852_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45852_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45852_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45852_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 29, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45856", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45856_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45856_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: 
"sensor.jasco_products_45856_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45856_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45856_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 30, - SIG_MANUFACTURER: "Jasco Products", - SIG_MODEL: "45857", - SIG_NODE_DESC: b"\x01@\x8e$\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 260, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 3, 2821], - SIG_EP_OUTPUT: [3, 6, 8], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006", "2:0x0008"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "ForceOnLight", - DEV_SIG_ENT_MAP_ID: "light.jasco_products_45857_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.jasco_products_45857_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.jasco_products_45857_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.jasco_products_45857_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 31, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: "SV02-610-MP-1.3", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: 
"button.keen_home_inc_sv02_610_mp_1_3_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_610_mp_1_3_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_610_mp_1_3_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_610_mp_1_3_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 32, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: "SV02-612-MP-1.2", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_2_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_2_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_2_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: 
"update.keen_home_inc_sv02_612_mp_1_2_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 33, - SIG_MANUFACTURER: "Keen Home Inc", - SIG_MODEL: "SV02-612-MP-1.3", - SIG_NODE_DESC: b"\x02@\x80[\x11RR\x00\x00*R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 3, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 8, 32, 1026, 1027, 2821, 64513, 64514], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.keen_home_inc_sv02_612_mp_1_3_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off"], - DEV_SIG_ENT_MAP_CLASS: "KeenVent", - DEV_SIG_ENT_MAP_ID: "cover.keen_home_inc_sv02_612_mp_1_3_keen_vent", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.keen_home_inc_sv02_612_mp_1_3_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.keen_home_inc_sv02_612_mp_1_3_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 34, - SIG_MANUFACTURER: "King Of Fans, Inc.", - SIG_MODEL: "HBUniversalCFRemote", - SIG_NODE_DESC: b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 514], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.king_of_fans_inc_hbuniversalcfremote_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: ( - "button.king_of_fans_inc_hbuniversalcfremote_identify" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.king_of_fans_inc_hbuniversalcfremote_lqi", - }, - ("fan", "00:11:22:33:44:55:66:77-1-514"): { - DEV_SIG_CLUSTER_HANDLERS: ["fan"], - DEV_SIG_ENT_MAP_CLASS: "KofFan", - DEV_SIG_ENT_MAP_ID: 
"fan.king_of_fans_inc_hbuniversalcfremote_fan", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.king_of_fans_inc_hbuniversalcfremote_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 35, - SIG_MANUFACTURER: "LDS", - SIG_MODEL: "ZBT-CCTSwitch-D0001", - SIG_NODE_DESC: b"\x02@\x80h\x11RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2048, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4096, 64769], - SIG_EP_OUTPUT: [3, 4, 6, 8, 25, 768, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019", "1:0x0300"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lds_zbt_cctswitch_d0001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lds_zbt_cctswitch_d0001_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lds_zbt_cctswitch_d0001_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 36, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "A19 RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_a19_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_a19_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_a19_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_a19_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_a19_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 37, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "FLEX RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", 
"00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_flex_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_flex_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_flex_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_flex_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 38, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "PLUG", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2821, 64513, 64520], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.ledvance_plug_switch", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_plug_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_plug_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_plug_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 39, - SIG_MANUFACTURER: "LEDVANCE", - SIG_MODEL: "RT RGBW", - SIG_NODE_DESC: b"\x01@\x8e\x89\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2821, 64513], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.ledvance_rt_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.ledvance_rt_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.ledvance_rt_rgbw_lqi", - }, - ("update", 
"00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.ledvance_rt_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 40, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.plug.maus01", - SIG_NODE_DESC: b"\x01@\x8e_\x11\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 81, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 16, 2820], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 9, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [12], - SIG_EP_OUTPUT: [4, 12], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 83, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [12], - SIG_EP_OUTPUT: [12], - SIG_EP_PROFILE: 260, - }, - 100: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 100, - SIG_EP_INPUT: [15], - SIG_EP_OUTPUT: [4, 15], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.lumi_lumi_plug_maus01_switch", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_device_temperature", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_plug_maus01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-100-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_plug_maus01_binary_input", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_plug_maus01_summation_delivered", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_plug_maus01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 41, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.relay.c2acn01", - SIG_NODE_DESC: b"\x01@\x8e7\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 4, 5, 6, 10, 12, 16, 2820], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [4, 5, 6, 16], 
- SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_device_temperature", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_relay_c2acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_relay_c2acn01_lqi", - }, - ("light", "00:11:22:33:44:55:66:77-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_relay_c2acn01_light_2", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_relay_c2acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 42, - 
SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b186acn01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b186acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b186acn01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b186acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 43, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b286acn01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286acn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286acn01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_remote_b286acn01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 44, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: 
"lumi.remote.b286opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 3: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 4: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 5: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 6: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b286opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b286opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 45, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b486opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 4: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 5: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - 6: { - SIG_EP_TYPE: -1, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: -1, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b486opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b486opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 
46, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b686opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 47, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.remote.b686opcn01", - SIG_NODE_DESC: b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 261, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [3, 6, 8, 768], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 4: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 5: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - 6: { - SIG_EP_TYPE: None, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: None, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0300", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_remote_b686opcn01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_remote_b686opcn01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 48, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", 
- }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 49, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6, 11, 17], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 50, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.router", - SIG_NODE_DESC: b"\x01@\x8e_\x11P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 8: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 8, - SIG_EP_INPUT: [0, 6, 17], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["8:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-8"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.lumi_lumi_router_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-8-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_router_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-8-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_router_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 51, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sen_ill.mgl01", - SIG_NODE_DESC: b"\x02@\x84n\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 262, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1024], - SIG_EP_OUTPUT: [3], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_battery", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: 
"button.lumi_lumi_sen_ill_mgl01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sen_ill_mgl01_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 52, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_86sw1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 18, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 12, 18], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_86sw1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_86sw1_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_86sw1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 53, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_cube.aqgl01", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 28417, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 28418, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3, 18], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 28419, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3, 12], - SIG_EP_OUTPUT: [3, 4, 5, 12], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_cube_aqgl01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_cube_aqgl01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_cube_aqgl01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 54, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_ht", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25, 1026, 1029, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 18, 25, 65535], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 24322, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 4, 5, 18], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 24323, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [3], - SIG_EP_OUTPUT: [3, 4, 5, 12], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005", "3:0x0005"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_ht_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_ht_humidity", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_ht_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 55, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_magnet", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2128, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 25, 65535], - SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_battery", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_opening", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_magnet_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 56, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_magnet.aq2", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 65535], - SIG_EP_OUTPUT: [0, 4, 6, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_magnet_aq2_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_magnet_aq2_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Opening", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_magnet_aq2_opening", - }, - }, - }, - { - DEV_SIG_DEV_NO: 57, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_motion.aq2", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 263, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 1024, 1030, 1280, 65535], - SIG_EP_OUTPUT: [0, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["occupancy"], - DEV_SIG_ENT_MAP_CLASS: "Occupancy", - DEV_SIG_ENT_MAP_ID: ( - "binary_sensor.lumi_lumi_sensor_motion_aq2_occupancy" - ), - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_motion_aq2_motion", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_motion_aq2_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: 
["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_motion_aq2_device_temperature" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_motion_aq2_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_motion_aq2_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 58, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_smoke", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 12, 18, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_smoke_smoke", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_smoke_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_smoke_device_temperature" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_smoke_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_smoke_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 59, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_switch", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3], - SIG_EP_OUTPUT: [0, 4, 5, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_switch_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - 
DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 60, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_switch.aq2", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 65535], - SIG_EP_OUTPUT: [0, 4, 6, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq2_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 61, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_switch.aq3", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 6, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 18], - SIG_EP_OUTPUT: [0, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_switch_aq3_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 62, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.sensor_wleak.aq1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 2, 3, 1280], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_sensor_wleak_aq1_ias_zone", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: ( - "sensor.lumi_lumi_sensor_wleak_aq1_device_temperature" - ), - }, - ("button", 
"00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_sensor_wleak_aq1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_sensor_wleak_aq1_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_sensor_wleak_aq1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 63, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.vibration.aq1", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.DOOR_LOCK, - INPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Ota.cluster_id, - DoorLock.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - Ota.cluster_id, - DoorLock.cluster_id, - ], - }, - 2: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: 0x5F02, - INPUT_CLUSTERS: [Identify.cluster_id, MultistateInput.cluster_id], - OUTPUT_CLUSTERS: [ - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - MultistateInput.cluster_id, - ], - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0019", "2:0x0005"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.lumi_lumi_vibration_aq1_vibration", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_vibration_aq1_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["device_temperature"], - DEV_SIG_ENT_MAP_CLASS: "DeviceTemperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_vibration_aq1_device_temperature", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.lumi_lumi_vibration_aq1_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 64, - SIG_MANUFACTURER: "LUMI", - SIG_MODEL: "lumi.weather", - SIG_NODE_DESC: b"\x02@\x807\x10\x7fd\x00\x00\x00d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 24321, - DEV_SIG_EP_ID: 1, 
- SIG_EP_INPUT: [0, 1, 3, 1026, 1027, 1029, 65535], - SIG_EP_OUTPUT: [0, 4, 65535], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.lumi_lumi_weather_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1027"): { - DEV_SIG_CLUSTER_HANDLERS: ["pressure"], - DEV_SIG_ENT_MAP_CLASS: "Pressure", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_pressure", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1029"): { - DEV_SIG_CLUSTER_HANDLERS: ["humidity"], - DEV_SIG_ENT_MAP_CLASS: "Humidity", - DEV_SIG_ENT_MAP_ID: "sensor.lumi_lumi_weather_humidity", - }, - }, - }, - { - DEV_SIG_DEV_NO: 65, - SIG_MANUFACTURER: "NYCE", - SIG_MODEL: "3010", - SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1280], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3010_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.nyce_3010_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3010_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 66, - SIG_MANUFACTURER: "NYCE", - SIG_MODEL: "3014", - SIG_NODE_DESC: b"\x02@\x80\xb9\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1280], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.nyce_3014_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: 
"button.nyce_3014_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.nyce_3014_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 67, - SIG_MANUFACTURER: None, - SIG_MODEL: None, - SIG_NODE_DESC: b"\x10@\x0f5\x11Y=\x00@\x00=\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 5, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [10, 25], - SIG_EP_OUTPUT: [1280], - SIG_EP_PROFILE: 260, - }, - 242: { - SIG_EP_TYPE: 100, - DEV_SIG_EP_ID: 242, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [33], - SIG_EP_PROFILE: 41440, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: {}, - }, - { - DEV_SIG_DEV_NO: 68, - SIG_MANUFACTURER: None, - SIG_MODEL: None, - SIG_NODE_DESC: b"\x00@\x8f\xcd\xabR\x80\x00\x00\x00\x80\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 48879, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [], - SIG_EP_OUTPUT: [1280], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: {}, - }, - { - DEV_SIG_DEV_NO: 69, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY A19 RGBW", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_a19_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_a19_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_a19_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_a19_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 70, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY Dimming Switch", - SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 2821], - SIG_EP_OUTPUT: [3, 6, 8, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0006", "1:0x0008", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_dimming_switch_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - 
DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_dimming_switch_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_dimming_switch_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 71, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY Flex RGBW", - SIG_NODE_DESC: b"\x19@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_flex_rgbw_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_flex_rgbw_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_flex_rgbw_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_flex_rgbw_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 72, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "LIGHTIFY RT Tunable White", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 258, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 2820, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "light_color", "level"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.osram_lightify_rt_tunable_white_light", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_lightify_rt_tunable_white_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_power"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_apparent_power" - ), - }, - ("sensor", 
"00:11:22:33:44:55:66:77-3-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_current"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: ("sensor.osram_lightify_rt_tunable_white_voltage"), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_ac_frequency" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: ( - "sensor.osram_lightify_rt_tunable_white_power_factor" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_lightify_rt_tunable_white_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_lightify_rt_tunable_white_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 73, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "Plug 01", - SIG_NODE_DESC: b"\x01@\x8e\xaa\xbb@\x00\x00\x00\x00\x00\x00\x03", - SIG_ENDPOINTS: { - 3: { - SIG_EP_TYPE: 16, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 4096, 64527], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 49246, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["3:0x0019"], - DEV_SIG_ENT_MAP: { - ("switch", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.osram_plug_01_switch", - }, - ("button", "00:11:22:33:44:55:66:77-3-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.osram_plug_01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-3-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_plug_01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-3-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_plug_01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 74, - SIG_MANUFACTURER: "OSRAM", - SIG_MODEL: "Switch 4x-LIGHTIFY", - SIG_NODE_DESC: b"\x02@\x80\x0c\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 32, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 25, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 
2064, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 5: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 5, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - 6: { - SIG_EP_TYPE: 2064, - DEV_SIG_EP_ID: 6, - SIG_EP_INPUT: [0, 4096, 64768], - SIG_EP_OUTPUT: [3, 4, 5, 6, 8, 768, 4096], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [ - "1:0x0005", - "1:0x0006", - "1:0x0008", - "1:0x0019", - "1:0x0300", - "2:0x0005", - "2:0x0006", - "2:0x0008", - "2:0x0300", - "3:0x0005", - "3:0x0006", - "3:0x0008", - "3:0x0300", - "4:0x0005", - "4:0x0006", - "4:0x0008", - "4:0x0300", - "5:0x0005", - "5:0x0006", - "5:0x0008", - "5:0x0300", - "6:0x0005", - "6:0x0006", - "6:0x0008", - "6:0x0300", - ], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.osram_switch_4x_lightify_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.osram_switch_4x_lightify_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 75, - SIG_MANUFACTURER: "Philips", - SIG_MODEL: "RWL020", - SIG_NODE_DESC: b"\x02@\x80\x0b\x10G-\x00\x00\x00-\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2096, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0], - SIG_EP_OUTPUT: [0, 3, 4, 5, 6, 8], - SIG_EP_PROFILE: 49246, - }, - 2: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 1, 3, 15, 64512], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0008", "2:0x0019"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_rwl020_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-2-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.philips_rwl020_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.philips_rwl020_battery", - }, - ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.philips_rwl020_firmware", - }, - }, - 
}, - { - DEV_SIG_DEV_NO: 76, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "button", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_button_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_button_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_button_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_button_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 77, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "multi", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 64514], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_multi_identify", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-64514"): { - DEV_SIG_CLUSTER_HANDLERS: ["accelerometer"], - DEV_SIG_ENT_MAP_CLASS: "Accelerometer", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_multi_accelerometer", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_multi_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_multi_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 78, - SIG_MANUFACTURER: "Samjin", - SIG_MODEL: "water", - SIG_NODE_DESC: b"\x02@\x80A\x12RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.samjin_water_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.samjin_water_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.samjin_water_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.samjin_water_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 79, - SIG_MANUFACTURER: "Securifi Ltd.", - SIG_MODEL: None, - SIG_NODE_DESC: b"\x01@\x8e\x02\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 0, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 2820, 2821], - SIG_EP_OUTPUT: [0, 1, 3, 4, 5, 6, 25, 2820, 2821], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0005", "1:0x0006", "1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.securifi_ltd_unk_model_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_voltage", - }, - ("sensor", 
"00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.securifi_ltd_unk_model_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.securifi_ltd_unk_model_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.securifi_ltd_unk_model_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 80, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-DWS04N_SF", - SIG_NODE_DESC: b"\x02@\x801\x11R\xff\x00\x00\x00\xff\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_dws04n_sf_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_dws04n_sf_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_dws04n_sf_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_dws04n_sf_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 81, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-ESW01", - SIG_NODE_DESC: b"\x01@\x8e1\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 1794, 2820, 2821], - SIG_EP_OUTPUT: [3, 10, 25, 2821], - SIG_EP_PROFILE: 260, - }, - 2: { - SIG_EP_TYPE: 259, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [0, 1, 3], 
- SIG_EP_OUTPUT: [3, 6], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019", "2:0x0006"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.sercomm_corp_sz_esw01_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_esw01_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_esw01_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_esw01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 82, - SIG_MANUFACTURER: "Sercomm Corp.", - SIG_MODEL: "SZ-PIR04", - SIG_NODE_DESC: b"\x02@\x801\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1024, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.sercomm_corp_sz_pir04_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sercomm_corp_sz_pir04_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_illuminance", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sercomm_corp_sz_pir04_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sercomm_corp_sz_pir04_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 83, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: "RM3250ZB", - SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00*+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 2820, 2821, 65281], - SIG_EP_OUTPUT: [3, 4, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_rm3250zb_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.sinope_technologies_rm3250zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: 
["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_rm3250zb_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.sinope_technologies_rm3250zb_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_rm3250zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 84, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: "TH1123ZB", - SIG_NODE_DESC: b"\x12@\x8c\x9c\x11G+\x00\x00\x00+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], - SIG_EP_OUTPUT: [25, 65281], - SIG_EP_PROFILE: 260, - }, - 196: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 196, - SIG_EP_INPUT: [1], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49757, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1123zb_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: [ - "thermostat", - "sinope_manufacturer_specific", - ], - DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", - DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1123zb_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.sinope_technologies_th1123zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_power_factor", - }, - 
("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1123zb_setpoint_change_source", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1123zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 85, - SIG_MANUFACTURER: "Sinope Technologies", - SIG_MODEL: "TH1124ZB", - SIG_NODE_DESC: b"\x11@\x8e\x9c\x11G+\x00\x00\x00+\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 513, 516, 1026, 2820, 2821, 65281], - SIG_EP_OUTPUT: [25, 65281], - SIG_EP_PROFILE: 260, - }, - 196: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 196, - SIG_EP_INPUT: [1], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49757, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sinope_technologies_th1124zb_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: [ - "thermostat", - "sinope_manufacturer_specific", - ], - DEV_SIG_ENT_MAP_CLASS: "SinopeTechnologiesThermostat", - DEV_SIG_ENT_MAP_ID: "climate.sinope_technologies_th1124zb_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "PolledElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: ( - "sensor.sinope_technologies_th1124zb_apparent_power" - ), - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: 
"sensor.sinope_technologies_th1124zb_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SinopeHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.sinope_technologies_th1124zb_setpoint_change_source", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sinope_technologies_th1124zb_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 86, - SIG_MANUFACTURER: "SmartThings", - SIG_MODEL: "outletv4", - SIG_NODE_DESC: b"\x01@\x8e\n\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 9, 15, 2820], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.smartthings_outletv4_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.smartthings_outletv4_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurement", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-apparent_power"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementApparentPower", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_apparent_power", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_current"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - 
DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSCurrent", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_current", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-rms_voltage"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementRMSVoltage", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_voltage", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-ac_frequency"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementFrequency", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_ac_frequency", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-2820-power_factor"): { - DEV_SIG_CLUSTER_HANDLERS: ["electrical_measurement"], - DEV_SIG_ENT_MAP_CLASS: "ElectricalMeasurementPowerFactor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_power_factor", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_outletv4_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.smartthings_outletv4_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.smartthings_outletv4_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 87, - SIG_MANUFACTURER: "SmartThings", - SIG_MODEL: "tagv4", - SIG_NODE_DESC: b"\x02@\x80\n\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 32768, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 15, 32], - SIG_EP_OUTPUT: [3, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("device_tracker", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "ZHADeviceScannerEntity", - DEV_SIG_ENT_MAP_ID: "device_tracker.smartthings_tagv4_device_scanner", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-15"): { - DEV_SIG_CLUSTER_HANDLERS: ["binary_input"], - DEV_SIG_ENT_MAP_CLASS: "BinaryInput", - DEV_SIG_ENT_MAP_ID: "binary_sensor.smartthings_tagv4_binary_input", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.smartthings_tagv4_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.smartthings_tagv4_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.smartthings_tagv4_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 88, - SIG_MANUFACTURER: "Third Reality, Inc", - SIG_MODEL: "3RSS007Z", - SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 25], - SIG_EP_OUTPUT: [], 
- SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss007z_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss007z_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss007z_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.third_reality_inc_3rss007z_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 89, - SIG_MANUFACTURER: "Third Reality, Inc", - SIG_MODEL: "3RSS008Z", - SIG_NODE_DESC: b"\x02@\x803\x12\x7fd\x00\x00,d\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 2, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 6, 25], - SIG_EP_OUTPUT: [1], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.third_reality_inc_3rss008z_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.third_reality_inc_3rss008z_lqi", - }, - ("switch", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.third_reality_inc_3rss008z_switch", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.third_reality_inc_3rss008z_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 90, - SIG_MANUFACTURER: "Visonic", - SIG_MODEL: "MCT-340 E", - SIG_NODE_DESC: b"\x02@\x80\x11\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 32, 1026, 1280, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.visonic_mct_340_e_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.visonic_mct_340_e_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.visonic_mct_340_e_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.visonic_mct_340_e_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 91, - SIG_MANUFACTURER: "Zen Within", - SIG_MODEL: "Zen-01", - SIG_NODE_DESC: b"\x02@\x80X\x11R\x80\x00\x00\x00\x80\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 769, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 4, 5, 32, 513, 514, 516, 2821], - SIG_EP_OUTPUT: [10, 25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.zen_within_zen_01_identify", - }, - ("climate", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat", "fan"], - DEV_SIG_ENT_MAP_CLASS: "ZenWithinThermostat", - DEV_SIG_ENT_MAP_ID: "climate.zen_within_zen_01_thermostat", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_lqi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-hvac_action"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "ThermostatHVACAction", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_hvac_action", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-pi_heating_demand"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "PiHeatingDemand", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_pi_heating_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-513-setpoint_change_source"): { - DEV_SIG_CLUSTER_HANDLERS: ["thermostat"], - DEV_SIG_ENT_MAP_CLASS: "SetpointChangeSource", - DEV_SIG_ENT_MAP_ID: "sensor.zen_within_zen_01_setpoint_change_source", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.zen_within_zen_01_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 92, - SIG_MANUFACTURER: "_TYZB01_ns1ndbww", - SIG_MODEL: "TS0004", - SIG_NODE_DESC: b"\x01@\x8e\x02\x10R\x00\x02\x00,\x00\x02\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 4, 5, 6, 10], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - 2: { - 
SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 2, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - 3: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 3, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - 4: { - SIG_EP_TYPE: 256, - DEV_SIG_EP_ID: 4, - SIG_EP_INPUT: [4, 5, 6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.tyzb01_ns1ndbww_ts0004_lqi", - }, - ("light", "00:11:22:33:44:55:66:77-2"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_2", - }, - ("light", "00:11:22:33:44:55:66:77-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_3", - }, - ("light", "00:11:22:33:44:55:66:77-4"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Light", - DEV_SIG_ENT_MAP_ID: "light.tyzb01_ns1ndbww_ts0004_light_4", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.tyzb01_ns1ndbww_ts0004_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 93, - SIG_MANUFACTURER: "netvox", - SIG_MODEL: "Z308E3ED", - SIG_NODE_DESC: b"\x02@\x80\x9f\x10RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 1026, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 3, 21, 32, 1280, 2821], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("binary_sensor", "00:11:22:33:44:55:66:77-1-1280"): { - DEV_SIG_CLUSTER_HANDLERS: ["ias_zone"], - DEV_SIG_ENT_MAP_CLASS: "IASZone", - DEV_SIG_ENT_MAP_ID: "binary_sensor.netvox_z308e3ed_ias_zone", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.netvox_z308e3ed_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.netvox_z308e3ed_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 94, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "E11-G13", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - 
DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: "light.sengled_e11_g13_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_e11_g13_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e11_g13_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_e11_g13_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 95, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "E12-N14", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 1794, 2821], - SIG_EP_OUTPUT: [25], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: "light.sengled_e12_n14_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_e12_n14_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_e12_n14_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_e12_n14_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 96, - SIG_MANUFACTURER: "sengled", - SIG_MODEL: "Z01-A19NAE26", - SIG_NODE_DESC: b"\x02@\x8c`\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 257, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 768, 1794, 2821], - SIG_EP_OUTPUT: [25], - 
SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["1:0x0019"], - DEV_SIG_ENT_MAP: { - ("light", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off", "level", "light_color"], - DEV_SIG_ENT_MAP_CLASS: "MinTransitionLight", - DEV_SIG_ENT_MAP_ID: "light.sengled_z01_a19nae26_light", - }, - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.sengled_z01_a19nae26_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergyMetering", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_instantaneous_demand", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1794-summation_delivered"): { - DEV_SIG_CLUSTER_HANDLERS: ["smartenergy_metering"], - DEV_SIG_ENT_MAP_CLASS: "SmartEnergySummation", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_summation_delivered", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.sengled_z01_a19nae26_lqi", - }, - ("update", "00:11:22:33:44:55:66:77-1-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.sengled_z01_a19nae26_firmware", - }, - }, - }, - { - DEV_SIG_DEV_NO: 97, - SIG_MANUFACTURER: "unk_manufacturer", - SIG_MODEL: "unk_model", - SIG_NODE_DESC: b"\x01@\x8e\x10\x11RR\x00\x00\x00R\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 512, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 3, 4, 5, 6, 8, 10, 21, 256, 64544, 64545], - SIG_EP_OUTPUT: [3, 64544], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.unk_manufacturer_unk_model_identify", - }, - ("cover", "00:11:22:33:44:55:66:77-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["level", "on_off", "shade"], - DEV_SIG_ENT_MAP_CLASS: "Shade", - DEV_SIG_ENT_MAP_ID: "cover.unk_manufacturer_unk_model_shade", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.unk_manufacturer_unk_model_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.unk_manufacturer_unk_model_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 98, - SIG_MANUFACTURER: "Digi", - SIG_MODEL: "XBee3", - SIG_NODE_DESC: b"\x01@\x8e\x1e\x10R\xff\x00\x00,\xff\x00\x00", - SIG_ENDPOINTS: { - 208: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 208, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 209: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 209, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 210: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 210, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 211: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 211, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 212: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 212, - 
SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 213: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 213, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 214: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 214, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 215: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 215, - SIG_EP_INPUT: [6, 12], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 216: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 216, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 217: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 217, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 218: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 218, - SIG_EP_INPUT: [6, 13], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 219: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 219, - SIG_EP_INPUT: [6, 13], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 220: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 220, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 221: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 221, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 222: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 222, - SIG_EP_INPUT: [6], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 49413, - }, - 232: { - SIG_EP_TYPE: 1, - DEV_SIG_EP_ID: 232, - SIG_EP_INPUT: [17, 146], - SIG_EP_OUTPUT: [8, 17], - SIG_EP_PROFILE: 49413, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: ["232:0x0008"], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-208-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input", - }, - ("switch", "00:11:22:33:44:55:66:77-208-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch", - }, - ("sensor", "00:11:22:33:44:55:66:77-209-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_2", - }, - ("switch", "00:11:22:33:44:55:66:77-209-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_2", - }, - ("sensor", "00:11:22:33:44:55:66:77-210-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_3", - }, - ("switch", "00:11:22:33:44:55:66:77-210-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_3", - }, - ("sensor", "00:11:22:33:44:55:66:77-211-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_4", - }, - ("switch", "00:11:22:33:44:55:66:77-211-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_4", - }, - ("switch", "00:11:22:33:44:55:66:77-212-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_5", - }, - ("switch", "00:11:22:33:44:55:66:77-213-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_6", - }, - ("switch", "00:11:22:33:44:55:66:77-214-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_7", - 
}, - ("sensor", "00:11:22:33:44:55:66:77-215-12"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_input"], - DEV_SIG_ENT_MAP_CLASS: "AnalogInput", - DEV_SIG_ENT_MAP_ID: "sensor.digi_xbee3_analog_input_5", - }, - ("switch", "00:11:22:33:44:55:66:77-215-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_8", - }, - ("switch", "00:11:22:33:44:55:66:77-216-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_9", - }, - ("switch", "00:11:22:33:44:55:66:77-217-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_10", - }, - ("number", "00:11:22:33:44:55:66:77-218-13"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], - DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", - DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number", - }, - ("switch", "00:11:22:33:44:55:66:77-218-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_11", - }, - ("switch", "00:11:22:33:44:55:66:77-219-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_12", - }, - ("number", "00:11:22:33:44:55:66:77-219-13"): { - DEV_SIG_CLUSTER_HANDLERS: ["analog_output"], - DEV_SIG_ENT_MAP_CLASS: "ZhaNumber", - DEV_SIG_ENT_MAP_ID: "number.digi_xbee3_number_2", - }, - ("switch", "00:11:22:33:44:55:66:77-220-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_13", - }, - ("switch", "00:11:22:33:44:55:66:77-221-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_14", - }, - ("switch", "00:11:22:33:44:55:66:77-222-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Switch", - DEV_SIG_ENT_MAP_ID: "switch.digi_xbee3_switch_15", - }, - }, - }, - { - DEV_SIG_DEV_NO: 99, - SIG_MANUFACTURER: "efektalab.ru", - SIG_MODEL: "EFEKTA_PWS", - SIG_NODE_DESC: b"\x02@\x80\x00\x00P\xa0\x00\x00\x00\xa0\x00\x00", - SIG_ENDPOINTS: { - 1: { - SIG_EP_TYPE: 12, - DEV_SIG_EP_ID: 1, - SIG_EP_INPUT: [0, 1, 1026, 1032], - SIG_EP_OUTPUT: [], - SIG_EP_PROFILE: 260, - }, - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1032"): { - DEV_SIG_CLUSTER_HANDLERS: ["soil_moisture"], - DEV_SIG_ENT_MAP_CLASS: "SoilMoisture", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_soil_moisture", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_temperature", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.efektalab_ru_efekta_pws_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 100, - SIG_MANUFACTURER: "Konke", - SIG_MODEL: "3AFE170100510001", - SIG_NODE_DESC: 
b"\x02@\x80\x02\x10RR\x00\x00,R\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: 260, - DEVICE_TYPE: zha.DeviceType.ON_OFF_OUTPUT, - INPUT_CLUSTERS: [ - Basic.cluster_id, - PowerConfiguration.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - OnOff.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Identify.cluster_id, - ], - } - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-1-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.konke_3afe170100510001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.konke_3afe170100510001_lqi", - }, - }, - }, - { - DEV_SIG_DEV_NO: 101, - SIG_MANUFACTURER: "Philips", - SIG_MODEL: "SML001", - SIG_NODE_DESC: b"\x02@\x80\x0b\x10Y?\x00\x00\x00?\x00\x00", - SIG_ENDPOINTS: { - 1: { - PROFILE_ID: zll.PROFILE_ID, - DEVICE_TYPE: zll.DeviceType.ON_OFF_SENSOR, - INPUT_CLUSTERS: [Basic.cluster_id], - OUTPUT_CLUSTERS: [ - Basic.cluster_id, - Identify.cluster_id, - Groups.cluster_id, - Scenes.cluster_id, - OnOff.cluster_id, - LevelControl.cluster_id, - Color.cluster_id, - ], - }, - 2: { - PROFILE_ID: zha.PROFILE_ID, - DEVICE_TYPE: zha.DeviceType.OCCUPANCY_SENSOR, - INPUT_CLUSTERS: [ - Basic.cluster_id, - PowerConfiguration.cluster_id, - Identify.cluster_id, - IlluminanceMeasurement.cluster_id, - TemperatureMeasurement.cluster_id, - OccupancySensing.cluster_id, - ], - OUTPUT_CLUSTERS: [ - Ota.cluster_id, - ], - }, - }, - DEV_SIG_ATTRIBUTES: { - 2: { - "basic": { - "trigger_indicator": Bool(False), - }, - "philips_occupancy": { - "sensitivity": uint8_t(1), - }, - } - }, - DEV_SIG_EVT_CLUSTER_HANDLERS: [ - "1:0x0005", - "1:0x0006", - "1:0x0008", - "1:0x0300", - "2:0x0019", - ], - DEV_SIG_ENT_MAP: { - ("button", "00:11:22:33:44:55:66:77-2-3"): { - DEV_SIG_CLUSTER_HANDLERS: ["identify"], - DEV_SIG_ENT_MAP_CLASS: "ZHAIdentifyButton", - DEV_SIG_ENT_MAP_ID: "button.philips_sml001_identify", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1"): { - DEV_SIG_CLUSTER_HANDLERS: ["power"], - DEV_SIG_ENT_MAP_CLASS: "Battery", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_battery", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-rssi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "RSSISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_rssi", - }, - ("sensor", "00:11:22:33:44:55:66:77-1-0-lqi"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "LQISensor", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_lqi", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-1-6"): { - DEV_SIG_CLUSTER_HANDLERS: ["on_off"], - DEV_SIG_ENT_MAP_CLASS: "Motion", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_motion", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1024"): { - DEV_SIG_CLUSTER_HANDLERS: ["illuminance"], - DEV_SIG_ENT_MAP_CLASS: "Illuminance", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_illuminance", - }, - ("binary_sensor", "00:11:22:33:44:55:66:77-2-1030"): { - DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], - 
DEV_SIG_ENT_MAP_CLASS: "HueOccupancy", - DEV_SIG_ENT_MAP_ID: "binary_sensor.philips_sml001_occupancy", - }, - ("sensor", "00:11:22:33:44:55:66:77-2-1026"): { - DEV_SIG_CLUSTER_HANDLERS: ["temperature"], - DEV_SIG_ENT_MAP_CLASS: "Temperature", - DEV_SIG_ENT_MAP_ID: "sensor.philips_sml001_temperature", - }, - ("switch", "00:11:22:33:44:55:66:77-2-0-trigger_indicator"): { - DEV_SIG_CLUSTER_HANDLERS: ["basic"], - DEV_SIG_ENT_MAP_CLASS: "HueMotionTriggerIndicatorSwitch", - DEV_SIG_ENT_MAP_ID: "switch.philips_sml001_led_trigger_indicator", - }, - ("select", "00:11:22:33:44:55:66:77-2-1030-motion_sensitivity"): { - DEV_SIG_CLUSTER_HANDLERS: ["philips_occupancy"], - DEV_SIG_ENT_MAP_CLASS: "HueV1MotionSensitivity", - DEV_SIG_ENT_MAP_ID: "select.philips_sml001_motion_sensitivity", - }, - ("update", "00:11:22:33:44:55:66:77-2-25-firmware_update"): { - DEV_SIG_CLUSTER_HANDLERS: ["ota"], - DEV_SIG_ENT_MAP_CLASS: "ZHAFirmwareUpdateEntity", - DEV_SIG_ENT_MAP_ID: "update.philips_sml001_firmware", - }, - }, - }, -] diff --git a/tests/components/zone/test_trigger.py b/tests/components/zone/test_trigger.py index 6ec5e2fd894..a28b3c0592a 100644 --- a/tests/components/zone/test_trigger.py +++ b/tests/components/zone/test_trigger.py @@ -8,7 +8,7 @@ from homeassistant.core import Context, HomeAssistant, ServiceCall from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component -from tests.common import async_mock_service, mock_component +from tests.common import mock_component @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -16,14 +16,8 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") - - @pytest.fixture(autouse=True) -def setup_comp(hass): +def setup_comp(hass: HomeAssistant) -> None: """Initialize components.""" mock_component(hass, "group") hass.loop.run_until_complete( @@ -43,7 +37,7 @@ def setup_comp(hass): async def test_if_fires_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone enter.""" context = Context() @@ -88,9 +82,11 @@ async def test_if_fires_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + ) # Set out of zone again so we can trigger call hass.states.async_set( @@ -104,17 +100,20 @@ async def test_if_fires_on_zone_enter( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_fires_on_zone_enter_uuid( - hass: HomeAssistant, entity_registry: er.EntityRegistry, calls: list[ServiceCall] + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + service_calls: list[ServiceCall], ) -> None: """Test for firing on zone enter when device is specified by entity registry id.""" context = Context() @@ -165,9 
+164,11 @@ async def test_if_fires_on_zone_enter_uuid( ) await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].context.parent_id == context.id - assert calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + assert len(service_calls) == 1 + assert service_calls[0].context.parent_id == context.id + assert ( + service_calls[0].data["some"] == "zone - test.entity - hello - hello - test - 0" + ) # Set out of zone again so we can trigger call hass.states.async_set( @@ -181,17 +182,18 @@ async def test_if_fires_on_zone_enter_uuid( {ATTR_ENTITY_ID: ENTITY_MATCH_ALL}, blocking=True, ) + assert len(service_calls) == 2 hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 2 async def test_if_not_fires_for_enter_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone leave.""" hass.states.async_set( @@ -220,11 +222,11 @@ async def test_if_not_fires_for_enter_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_if_fires_on_zone_leave( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for firing on zone leave.""" hass.states.async_set( @@ -253,11 +255,11 @@ async def test_if_fires_on_zone_leave( ) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_if_not_fires_for_leave_on_zone_enter( - hass: HomeAssistant, calls: list[ServiceCall] + hass: HomeAssistant, service_calls: list[ServiceCall] ) -> None: """Test for not firing on zone enter.""" hass.states.async_set( @@ -286,10 +288,12 @@ async def test_if_not_fires_for_leave_on_zone_enter( ) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 -async def test_zone_condition(hass: HomeAssistant, calls: list[ServiceCall]) -> None: +async def test_zone_condition( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: """Test for zone condition.""" hass.states.async_set( "test.entity", "hello", {"latitude": 32.880586, "longitude": -117.237564} @@ -314,11 +318,11 @@ async def test_zone_condition(hass: HomeAssistant, calls: list[ServiceCall]) -> hass.bus.async_fire("test_event") await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 async def test_unknown_zone( - hass: HomeAssistant, calls: list[ServiceCall], caplog: pytest.LogCaptureFixture + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test for firing on zone enter.""" context = Context() diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 31c9c5affa5..60deb7dbce8 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -1,9 +1,11 @@ """Provide common Z-Wave JS fixtures.""" import asyncio +from collections.abc import Generator import copy import io import json +from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest @@ -20,13 +22,13 @@ from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="addon_info_side_effect") -def addon_info_side_effect_fixture(): +def addon_info_side_effect_fixture() -> Any | None: """Return the add-on info side effect.""" return None 
@pytest.fixture(name="addon_info") -def mock_addon_info(addon_info_side_effect): +def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_info", @@ -44,13 +46,15 @@ def mock_addon_info(addon_info_side_effect): @pytest.fixture(name="addon_store_info_side_effect") -def addon_store_info_side_effect_fixture(): +def addon_store_info_side_effect_fixture() -> Any | None: """Return the add-on store info side effect.""" return None @pytest.fixture(name="addon_store_info") -def mock_addon_store_info(addon_store_info_side_effect): +def mock_addon_store_info( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", @@ -66,7 +70,7 @@ def mock_addon_store_info(addon_store_info_side_effect): @pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): +def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: """Mock add-on already running.""" addon_store_info.return_value = { "available": True, @@ -81,7 +85,9 @@ def mock_addon_running(addon_store_info, addon_info): @pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): +def mock_addon_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: """Mock add-on already installed but not running.""" addon_store_info.return_value = { "available": True, @@ -96,23 +102,27 @@ def mock_addon_installed(addon_store_info, addon_info): @pytest.fixture(name="addon_not_installed") -def mock_addon_not_installed(addon_store_info, addon_info): +def mock_addon_not_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: """Mock add-on not installed.""" addon_store_info.return_value["available"] = True return addon_info @pytest.fixture(name="addon_options") -def mock_addon_options(addon_info): +def mock_addon_options(addon_info: AsyncMock): """Mock add-on options.""" return addon_info.return_value["options"] @pytest.fixture(name="set_addon_options_side_effect") -def set_addon_options_side_effect_fixture(addon_options): +def set_addon_options_side_effect_fixture( + addon_options: dict[str, Any], +) -> Any | None: """Return the set add-on options side effect.""" - async def set_addon_options(hass: HomeAssistant, slug, options): + async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: """Mock set add-on options.""" addon_options.update(options["options"]) @@ -120,7 +130,9 @@ def set_addon_options_side_effect_fixture(addon_options): @pytest.fixture(name="set_addon_options") -def mock_set_addon_options(set_addon_options_side_effect): +def mock_set_addon_options( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock set add-on options.""" with patch( "homeassistant.components.hassio.addon_manager.async_set_addon_options", @@ -130,7 +142,9 @@ def mock_set_addon_options(set_addon_options_side_effect): @pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): +def install_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: """Return the install add-on side effect.""" async def install_addon(hass: HomeAssistant, slug): @@ -149,7 +163,7 @@ def install_addon_side_effect_fixture(addon_store_info, addon_info): 
@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): +def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock install add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_install_addon", @@ -159,7 +173,7 @@ def mock_install_addon(install_addon_side_effect): @pytest.fixture(name="update_addon") -def mock_update_addon(): +def mock_update_addon() -> Generator[AsyncMock]: """Mock update add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_update_addon" @@ -168,7 +182,9 @@ def mock_update_addon(): @pytest.fixture(name="start_addon_side_effect") -def start_addon_side_effect_fixture(addon_store_info, addon_info): +def start_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: """Return the start add-on options side effect.""" async def start_addon(hass: HomeAssistant, slug): @@ -186,7 +202,7 @@ def start_addon_side_effect_fixture(addon_store_info, addon_info): @pytest.fixture(name="start_addon") -def mock_start_addon(start_addon_side_effect): +def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock start add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_start_addon", @@ -196,7 +212,7 @@ def mock_start_addon(start_addon_side_effect): @pytest.fixture(name="stop_addon") -def stop_addon_fixture(): +def stop_addon_fixture() -> Generator[AsyncMock]: """Mock stop add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_stop_addon" @@ -205,13 +221,13 @@ def stop_addon_fixture(): @pytest.fixture(name="restart_addon_side_effect") -def restart_addon_side_effect_fixture(): +def restart_addon_side_effect_fixture() -> Any | None: """Return the restart add-on options side effect.""" return None @pytest.fixture(name="restart_addon") -def mock_restart_addon(restart_addon_side_effect): +def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock restart add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_restart_addon", @@ -221,7 +237,7 @@ def mock_restart_addon(restart_addon_side_effect): @pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): +def uninstall_addon_fixture() -> Generator[AsyncMock]: """Mock uninstall add-on.""" with patch( "homeassistant.components.hassio.addon_manager.async_uninstall_addon" @@ -230,7 +246,7 @@ def uninstall_addon_fixture(): @pytest.fixture(name="create_backup") -def create_backup_fixture(): +def create_backup_fixture() -> Generator[AsyncMock]: """Mock create backup.""" with patch( "homeassistant.components.hassio.addon_manager.async_create_backup" diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index 10fd5edfabb..46172f72b2f 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -1,14 +1,15 @@ """Test the Z-Wave JS config flow.""" import asyncio +from collections.abc import Generator from copy import copy from ipaddress import ip_address -from unittest.mock import DEFAULT, MagicMock, call, patch +from typing import Any +from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch import aiohttp import pytest from serial.tools.list_ports_common import ListPortInfo -from typing_extensions import Generator from zwave_js_server.version import VersionInfo from homeassistant import config_entries @@ -59,7 +60,7 @@ CP2652_ZIGBEE_DISCOVERY_INFO = 
usb.UsbServiceInfo( @pytest.fixture(name="setup_entry") -def setup_entry_fixture(): +def setup_entry_fixture() -> Generator[AsyncMock]: """Mock entry setup.""" with patch( "homeassistant.components.zwave_js.async_setup_entry", return_value=True @@ -68,7 +69,7 @@ def setup_entry_fixture(): @pytest.fixture(name="supervisor") -def mock_supervisor_fixture(): +def mock_supervisor_fixture() -> Generator[None]: """Mock Supervisor.""" with patch( "homeassistant.components.zwave_js.config_flow.is_hassio", return_value=True @@ -77,19 +78,21 @@ def mock_supervisor_fixture(): @pytest.fixture(name="discovery_info") -def discovery_info_fixture(): +def discovery_info_fixture() -> dict[str, Any]: """Return the discovery info from the supervisor.""" return DEFAULT @pytest.fixture(name="discovery_info_side_effect") -def discovery_info_side_effect_fixture(): +def discovery_info_side_effect_fixture() -> Any | None: """Return the discovery info from the supervisor.""" return None @pytest.fixture(name="get_addon_discovery_info") -def mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect): +def mock_get_addon_discovery_info( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: """Mock get add-on discovery info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", @@ -100,13 +103,15 @@ def mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect): @pytest.fixture(name="server_version_side_effect") -def server_version_side_effect_fixture(): +def server_version_side_effect_fixture() -> Any | None: """Return the server version side effect.""" return None @pytest.fixture(name="get_server_version", autouse=True) -def mock_get_server_version(server_version_side_effect, server_version_timeout): +def mock_get_server_version( + server_version_side_effect: Any | None, server_version_timeout: int +) -> Generator[AsyncMock]: """Mock server version.""" version_info = VersionInfo( driver_version="mock-driver-version", @@ -130,18 +135,18 @@ def mock_get_server_version(server_version_side_effect, server_version_timeout): @pytest.fixture(name="server_version_timeout") -def mock_server_version_timeout(): +def mock_server_version_timeout() -> int: """Patch the timeout for getting server version.""" return SERVER_VERSION_TIMEOUT @pytest.fixture(name="addon_setup_time", autouse=True) -def mock_addon_setup_time(): +def mock_addon_setup_time() -> Generator[None]: """Mock add-on setup sleep time.""" with patch( "homeassistant.components.zwave_js.config_flow.ADDON_SETUP_TIMEOUT", new=0 - ) as addon_setup_time: - yield addon_setup_time + ): + yield @pytest.fixture(name="serial_port") diff --git a/tests/components/zwave_js/test_device_condition.py b/tests/components/zwave_js/test_device_condition.py index 61ed2bb35fb..17bc4cf0f5d 100644 --- a/tests/components/zwave_js/test_device_condition.py +++ b/tests/components/zwave_js/test_device_condition.py @@ -25,13 +25,7 @@ from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_get_conditions( @@ -99,7 +93,7 @@ async def 
test_node_status_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for node_status conditions.""" @@ -206,8 +200,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "alive - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "alive - event - test_event1" event = Event( "wake up", @@ -225,8 +219,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "awake - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "awake - event - test_event2" event = Event( "sleep", @@ -240,8 +234,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 3 - assert calls[2].data["some"] == "asleep - event - test_event3" + assert len(service_calls) == 3 + assert service_calls[2].data["some"] == "asleep - event - test_event3" event = Event( "dead", @@ -255,8 +249,8 @@ async def test_node_status_state( hass.bus.async_fire("test_event3") hass.bus.async_fire("test_event4") await hass.async_block_till_done() - assert len(calls) == 4 - assert calls[3].data["some"] == "dead - event - test_event4" + assert len(service_calls) == 4 + assert service_calls[3].data["some"] == "dead - event - test_event4" async def test_config_parameter_state( @@ -264,7 +258,7 @@ async def test_config_parameter_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for config_parameter conditions.""" @@ -331,8 +325,8 @@ async def test_config_parameter_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "Beeper - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "Beeper - event - test_event1" # Flip Beeper state to not match condition event = Event( @@ -375,8 +369,8 @@ async def test_config_parameter_state( hass.bus.async_fire("test_event1") hass.bus.async_fire("test_event2") await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[1].data["some"] == "User Slot Status - event - test_event2" + assert len(service_calls) == 2 + assert service_calls[1].data["some"] == "User Slot Status - event - test_event2" async def test_value_state( @@ -384,7 +378,7 @@ async def test_value_state( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, ) -> None: """Test for value conditions.""" @@ -427,8 +421,8 @@ async def test_value_state( hass.bus.async_fire("test_event1") await hass.async_block_till_done() - assert len(calls) == 1 - assert calls[0].data["some"] == "value - event - test_event1" + assert len(service_calls) == 1 + assert service_calls[0].data["some"] == "value - event - test_event1" async def test_get_condition_capabilities_node_status( diff --git a/tests/components/zwave_js/test_device_trigger.py b/tests/components/zwave_js/test_device_trigger.py index 
0fa228288ec..ccc69f7723d 100644 --- a/tests/components/zwave_js/test_device_trigger.py +++ b/tests/components/zwave_js/test_device_trigger.py @@ -28,13 +28,7 @@ from homeassistant.helpers import ( ) from homeassistant.setup import async_setup_component -from tests.common import async_get_device_automations, async_mock_service - - -@pytest.fixture -def calls(hass: HomeAssistant) -> list[ServiceCall]: - """Track calls to a mock service.""" - return async_mock_service(hass, "test", "automation") +from tests.common import async_get_device_automations async def test_no_controller_triggers( @@ -85,7 +79,7 @@ async def test_if_notification_notification_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.notification.notification trigger firing.""" node: Node = lock_schlage_be469 @@ -168,13 +162,13 @@ async def test_if_notification_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.NOTIFICATION}" ) @@ -221,7 +215,7 @@ async def test_if_entry_control_notification_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for notification.entry_control trigger firing.""" node: Node = lock_schlage_be469 @@ -303,13 +297,13 @@ async def test_if_entry_control_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.notification.notification - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.notification.notification2 - device - zwave_js_notification - {CommandClass.ENTRY_CONTROL}" ) @@ -389,7 +383,7 @@ async def test_if_node_status_change_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -460,9 +454,9 @@ async def test_if_node_status_change_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == "state.node_status - device - alive" - assert calls[1].data["some"] == "state.node_status2 - device - alive" + assert len(service_calls) == 2 + assert service_calls[0].data["some"] == "state.node_status - device - alive" + assert service_calls[1].data["some"] == "state.node_status2 - device - alive" async def test_if_node_status_change_fires_legacy( @@ -472,7 +466,7 @@ async def test_if_node_status_change_fires_legacy( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for node_status trigger firing.""" node: Node = lock_schlage_be469 @@ -543,9 +537,9 @@ async def test_if_node_status_change_fires_legacy( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 - assert calls[0].data["some"] == "state.node_status - device - alive" - assert calls[1].data["some"] == "state.node_status2 
- device - alive" + assert len(service_calls) == 2 + assert service_calls[0].data["some"] == "state.node_status - device - alive" + assert service_calls[1].data["some"] == "state.node_status2 - device - alive" async def test_get_trigger_capabilities_node_status( @@ -645,7 +639,7 @@ async def test_if_basic_value_notification_fires( client, ge_in_wall_dimmer_switch, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.basic trigger firing.""" node: Node = ge_in_wall_dimmer_switch @@ -742,13 +736,13 @@ async def test_if_basic_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.basic - device - zwave_js_value_notification - {CommandClass.BASIC}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.basic2 - device - zwave_js_value_notification - {CommandClass.BASIC}" ) @@ -830,7 +824,7 @@ async def test_if_central_scene_value_notification_fires( client, wallmote_central_scene, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.central_scene trigger firing.""" node: Node = wallmote_central_scene @@ -933,13 +927,13 @@ async def test_if_central_scene_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.central_scene - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.central_scene2 - device - zwave_js_value_notification - {CommandClass.CENTRAL_SCENE}" ) @@ -1020,7 +1014,7 @@ async def test_if_scene_activation_value_notification_fires( client, hank_binary_switch, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for event.value_notification.scene_activation trigger firing.""" node: Node = hank_binary_switch @@ -1117,13 +1111,13 @@ async def test_if_scene_activation_value_notification_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 2 + assert len(service_calls) == 2 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == f"event.value_notification.scene_activation - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) assert ( - calls[1].data["some"] + service_calls[1].data["some"] == f"event.value_notification.scene_activation2 - device - zwave_js_value_notification - {CommandClass.SCENE_ACTIVATION}" ) @@ -1200,7 +1194,7 @@ async def test_if_value_updated_value_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.value trigger firing.""" node: Node = lock_schlage_be469 @@ -1261,7 +1255,7 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 # Publish fake value update that should trigger event = Event( @@ -1283,9 +1277,9 @@ async def test_if_value_updated_value_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 
assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "zwave_js.value_updated.value - zwave_js.value_updated - open" ) @@ -1296,7 +1290,7 @@ async def test_value_updated_value_no_driver( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test zwave_js.value_updated.value trigger with missing driver.""" node: Node = lock_schlage_be469 @@ -1362,7 +1356,7 @@ async def test_value_updated_value_no_driver( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 0 + assert len(service_calls) == 0 async def test_get_trigger_capabilities_value_updated_value( @@ -1455,7 +1449,7 @@ async def test_if_value_updated_config_parameter_fires( client, lock_schlage_be469, integration, - calls: list[ServiceCall], + service_calls: list[ServiceCall], ) -> None: """Test for zwave_js.value_updated.config_parameter trigger firing.""" node: Node = lock_schlage_be469 @@ -1517,9 +1511,9 @@ async def test_if_value_updated_config_parameter_fires( ) node.receive_event(event) await hass.async_block_till_done() - assert len(calls) == 1 + assert len(service_calls) == 1 assert ( - calls[0].data["some"] + service_calls[0].data["some"] == "zwave_js.value_updated.config_parameter - zwave_js.value_updated - 255" ) diff --git a/tests/components/zwave_js/test_fan.py b/tests/components/zwave_js/test_fan.py index 03cd6bfb704..2551fc7b34a 100644 --- a/tests/components/zwave_js/test_fan.py +++ b/tests/components/zwave_js/test_fan.py @@ -653,7 +653,12 @@ async def test_thermostat_fan( assert state.state == STATE_ON assert state.attributes.get(ATTR_FAN_STATE) == "Idle / off" assert state.attributes.get(ATTR_PRESET_MODE) == "Auto low" - assert state.attributes.get(ATTR_SUPPORTED_FEATURES) == FanEntityFeature.PRESET_MODE + assert ( + state.attributes.get(ATTR_SUPPORTED_FEATURES) + == FanEntityFeature.PRESET_MODE + | FanEntityFeature.TURN_OFF + | FanEntityFeature.TURN_ON + ) # Test setting preset mode await hass.services.async_call( diff --git a/tests/components/zwave_js/test_helpers.py b/tests/components/zwave_js/test_helpers.py index 016a2d718ac..2df2e134f49 100644 --- a/tests/components/zwave_js/test_helpers.py +++ b/tests/components/zwave_js/test_helpers.py @@ -42,4 +42,4 @@ async def test_get_value_state_schema_boolean_config_value( aeon_smart_switch_6.values["102-112-0-255"] ) assert isinstance(schema_validator, vol.Coerce) - assert schema_validator.type == bool + assert schema_validator.type is bool diff --git a/tests/conftest.py b/tests/conftest.py index 161ff458ac0..0667edf4be2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,8 +3,8 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Coroutine -from contextlib import asynccontextmanager, contextmanager +from collections.abc import AsyncGenerator, Callable, Coroutine, Generator +from contextlib import AsyncExitStack, asynccontextmanager, contextmanager import datetime import functools import gc @@ -12,6 +12,7 @@ import itertools import logging import os import reprlib +from shutil import rmtree import sqlite3 import ssl import threading @@ -33,12 +34,16 @@ import multidict import pytest import pytest_socket import requests_mock +import respx from syrupy.assertion import SnapshotAssertion -from typing_extensions import AsyncGenerator, Generator from homeassistant import block_async_io +from homeassistant.exceptions import ServiceNotFound -# Setup patching if dt_util time functions before any other Home Assistant imports 
+# Setup patching of recorder functions before any other Home Assistant imports +from . import patch_recorder # noqa: F401, isort:skip + +# Setup patching of dt_util time functions before any other Home Assistant imports from . import patch_time # noqa: F401, isort:skip from homeassistant import core as ha, loader, runner @@ -54,8 +59,9 @@ from homeassistant.components.websocket_api.auth import ( from homeassistant.components.websocket_api.http import URL from homeassistant.config import YAML_CONFIG_FILE from homeassistant.config_entries import ConfigEntries, ConfigEntry, ConfigEntryState -from homeassistant.const import HASSIO_USER_NAME +from homeassistant.const import BASE_PLATFORMS, HASSIO_USER_NAME from homeassistant.core import ( + Context, CoreState, HassJob, HomeAssistant, @@ -76,7 +82,7 @@ from homeassistant.helpers import ( from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.translation import _TranslationsCacheData from homeassistant.helpers.typing import ConfigType -from homeassistant.setup import BASE_PLATFORMS, async_setup_component +from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, location from homeassistant.util.async_ import create_eager_task from homeassistant.util.json import json_loads @@ -393,6 +399,13 @@ def verify_cleanup( # Restore the default time zone to not break subsequent tests dt_util.DEFAULT_TIME_ZONE = datetime.UTC + try: + # Verify respx.mock has been cleaned up + assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock" + finally: + # Clear mock routes to not break subsequent tests + respx.mock.clear() + @pytest.fixture(autouse=True) def reset_hass_threading_local_object() -> Generator[None]: @@ -892,7 +905,7 @@ def fail_on_log_exception( return def log_exception(format_err, *args): - raise # pylint: disable=misplaced-bare-raise + raise # noqa: PLE0704 monkeypatch.setattr("homeassistant.util.logging.log_exception", log_exception) @@ -1299,6 +1312,16 @@ def enable_migrate_entity_ids() -> bool: return False +@pytest.fixture +def enable_migrate_event_ids() -> bool: + """Fixture to control enabling of recorder's event id migration. + + To enable event id migration, tests can be marked with: + @pytest.mark.parametrize("enable_migrate_event_ids", [True]) + """ + return False + + @pytest.fixture def recorder_config() -> dict[str, Any] | None: """Fixture to override recorder config. @@ -1309,16 +1332,36 @@ def recorder_config() -> dict[str, Any] | None: return None +@pytest.fixture +def persistent_database() -> bool: + """Fixture to control if database should persist when recorder is shut down in test. + + When using sqlite, this uses on disk database instead of in memory database. + This does nothing when using mysql or postgresql. + + Note that the database is always destroyed in between tests.
+ + To use a persistent database, tests can be marked with: + @pytest.mark.parametrize("persistent_database", [True]) + """ + return False + + @pytest.fixture def recorder_db_url( pytestconfig: pytest.Config, hass_fixture_setup: list[bool], + persistent_database: str, + tmp_path_factory: pytest.TempPathFactory, ) -> Generator[str]: """Prepare a default database for tests and return a connection URL.""" assert not hass_fixture_setup db_url = cast(str, pytestconfig.getoption("dburl")) - if db_url.startswith("mysql://"): + if db_url == "sqlite://" and persistent_database: + tmp_path = tmp_path_factory.mktemp("recorder") + db_url = "sqlite:///" + str(tmp_path / "pytest.db") + elif db_url.startswith("mysql://"): # pylint: disable-next=import-outside-toplevel import sqlalchemy_utils @@ -1332,7 +1375,9 @@ def recorder_db_url( assert not sqlalchemy_utils.database_exists(db_url) sqlalchemy_utils.create_database(db_url, encoding="utf8") yield db_url - if db_url.startswith("mysql://"): + if db_url == "sqlite://" and persistent_database: + rmtree(tmp_path, ignore_errors=True) + elif db_url.startswith("mysql://"): # pylint: disable-next=import-outside-toplevel import sqlalchemy as sa @@ -1360,6 +1405,9 @@ async def _async_init_recorder_component( hass: HomeAssistant, add_config: dict[str, Any] | None = None, db_url: str | None = None, + *, + expected_setup_result: bool, + wait_setup: bool, ) -> None: """Initialize the recorder asynchronously.""" # pylint: disable-next=import-outside-toplevel @@ -1374,18 +1422,34 @@ async def _async_init_recorder_component( with patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True): if recorder.DOMAIN not in hass.data: recorder_helper.async_initialize_recorder(hass) - assert await async_setup_component( - hass, recorder.DOMAIN, {recorder.DOMAIN: config} + setup_task = asyncio.ensure_future( + async_setup_component(hass, recorder.DOMAIN, {recorder.DOMAIN: config}) ) - assert recorder.DOMAIN in hass.config.components + if wait_setup: + # Wait for recorder integration to setup + setup_result = await setup_task + assert setup_result == expected_setup_result + assert (recorder.DOMAIN in hass.config.components) == expected_setup_result + else: + # Wait for recorder to connect to the database + await recorder_helper.async_wait_recorder(hass) _LOGGER.info( "Test recorder successfully started, database location: %s", config[recorder.CONF_DB_URL], ) +class ThreadSession(threading.local): + """Keep track of session per thread.""" + + has_session = False + + +thread_session = ThreadSession() + + @pytest.fixture -async def async_setup_recorder_instance( +async def async_test_recorder( recorder_db_url: str, enable_nightly_purge: bool, enable_statistics: bool, @@ -1393,8 +1457,9 @@ async def async_setup_recorder_instance( enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, + enable_migrate_event_ids: bool, ) -> AsyncGenerator[RecorderInstanceGenerator]: - """Yield callable to setup recorder instance.""" + """Yield context manager to setup recorder instance.""" # pylint: disable-next=import-outside-toplevel from homeassistant.components import recorder @@ -1404,6 +1469,39 @@ async def async_setup_recorder_instance( # pylint: disable-next=import-outside-toplevel from .components.recorder.common import async_recorder_block_till_done + # pylint: disable-next=import-outside-toplevel + from .patch_recorder import real_session_scope + + if TYPE_CHECKING: + # pylint: disable-next=import-outside-toplevel + from 
sqlalchemy.orm.session import Session + + @contextmanager + def debug_session_scope( + *, + hass: HomeAssistant | None = None, + session: Session | None = None, + exception_filter: Callable[[Exception], bool] | None = None, + read_only: bool = False, + ) -> Generator[Session]: + """Wrap session_scope to bark if we create nested sessions.""" + if thread_session.has_session: + raise RuntimeError( + f"Thread '{threading.current_thread().name}' already has an " + "active session" + ) + thread_session.has_session = True + try: + with real_session_scope( + hass=hass, + session=session, + exception_filter=exception_filter, + read_only=read_only, + ) as ses: + yield ses + finally: + thread_session.has_session = False + nightly = recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None stats = recorder.Recorder.async_periodic_statistics if enable_statistics else None schema_validate = ( @@ -1417,22 +1515,27 @@ async def async_setup_recorder_instance( else None ) migrate_states_context_ids = ( - recorder.Recorder._migrate_states_context_ids + migration.StatesContextIDMigration.migrate_data if enable_migrate_context_ids else None ) migrate_events_context_ids = ( - recorder.Recorder._migrate_events_context_ids + migration.EventsContextIDMigration.migrate_data if enable_migrate_context_ids else None ) migrate_event_type_ids = ( - recorder.Recorder._migrate_event_type_ids + migration.EventTypeIDMigration.migrate_data if enable_migrate_event_type_ids else None ) migrate_entity_ids = ( - recorder.Recorder._migrate_entity_ids if enable_migrate_entity_ids else None + migration.EntityIDMigration.migrate_data if enable_migrate_entity_ids else None + ) + legacy_event_id_foreign_key_exists = ( + migration.EventIDPostMigration._legacy_event_id_foreign_key_exists + if enable_migrate_event_ids + else lambda _: None ) with ( patch( @@ -1451,43 +1554,101 @@ async def async_setup_recorder_instance( autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_events_context_ids", + "homeassistant.components.recorder.migration.EventsContextIDMigration.migrate_data", side_effect=migrate_events_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_states_context_ids", + "homeassistant.components.recorder.migration.StatesContextIDMigration.migrate_data", side_effect=migrate_states_context_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_event_type_ids", + "homeassistant.components.recorder.migration.EventTypeIDMigration.migrate_data", side_effect=migrate_event_type_ids, autospec=True, ), patch( - "homeassistant.components.recorder.Recorder._migrate_entity_ids", + "homeassistant.components.recorder.migration.EntityIDMigration.migrate_data", side_effect=migrate_entity_ids, autospec=True, ), + patch( + "homeassistant.components.recorder.migration.EventIDPostMigration._legacy_event_id_foreign_key_exists", + side_effect=legacy_event_id_foreign_key_exists, + autospec=True, + ), patch( "homeassistant.components.recorder.Recorder._schedule_compile_missing_statistics", side_effect=compile_missing, autospec=True, ), + patch.object( + patch_recorder, + "real_session_scope", + side_effect=debug_session_scope, + autospec=True, + ), ): - async def async_setup_recorder( - hass: HomeAssistant, config: ConfigType | None = None - ) -> recorder.Recorder: + @asynccontextmanager + async def async_test_recorder( + hass: HomeAssistant, + config: ConfigType | None = None, + *, + expected_setup_result: bool = True, + wait_recorder: bool = 
True, + wait_recorder_setup: bool = True, + ) -> AsyncGenerator[recorder.Recorder]: """Setup and return recorder instance.""" # noqa: D401 - await _async_init_recorder_component(hass, config, recorder_db_url) + await _async_init_recorder_component( + hass, + config, + recorder_db_url, + expected_setup_result=expected_setup_result, + wait_setup=wait_recorder_setup, + ) await hass.async_block_till_done() instance = hass.data[recorder.DATA_INSTANCE] # The recorder's worker is not started until Home Assistant is running - if hass.state is CoreState.running: + if hass.state is CoreState.running and wait_recorder: await async_recorder_block_till_done(hass) - return instance + try: + yield instance + finally: + if instance.is_alive(): + await instance._async_shutdown(None) + + yield async_test_recorder + + +@pytest.fixture +async def async_setup_recorder_instance( + async_test_recorder: RecorderInstanceGenerator, +) -> AsyncGenerator[RecorderInstanceGenerator]: + """Yield callable to setup recorder instance.""" + + async with AsyncExitStack() as stack: + + async def async_setup_recorder( + hass: HomeAssistant, + config: ConfigType | None = None, + *, + expected_setup_result: bool = True, + wait_recorder: bool = True, + wait_recorder_setup: bool = True, + ) -> AsyncGenerator[recorder.Recorder]: + """Set up and return recorder instance.""" + + return await stack.enter_async_context( + async_test_recorder( + hass, + config, + expected_setup_result=expected_setup_result, + wait_recorder=wait_recorder, + wait_recorder_setup=wait_recorder_setup, + ) + ) yield async_setup_recorder @@ -1495,11 +1656,12 @@ async def async_setup_recorder_instance( @pytest.fixture async def recorder_mock( recorder_config: dict[str, Any] | None, - async_setup_recorder_instance: RecorderInstanceGenerator, + async_test_recorder: RecorderInstanceGenerator, hass: HomeAssistant, -) -> recorder.Recorder: +) -> AsyncGenerator[recorder.Recorder]: """Fixture with in-memory recorder.""" - return await async_setup_recorder_instance(hass, recorder_config) + async with async_test_recorder(hass, recorder_config) as instance: + yield instance @pytest.fixture @@ -1662,7 +1824,7 @@ def label_registry(hass: HomeAssistant) -> lr.LabelRegistry: @pytest.fixture -def service_calls(hass: HomeAssistant) -> Generator[None, None, list[ServiceCall]]: +def service_calls(hass: HomeAssistant) -> Generator[list[ServiceCall]]: """Track all service calls.""" calls = [] @@ -1673,17 +1835,25 @@ def service_calls(hass: HomeAssistant) -> Generator[None, None, list[ServiceCall domain: str, service: str, service_data: dict[str, Any] | None = None, - **kwargs: Any, + blocking: bool = False, + context: Context | None = None, + target: dict[str, Any] | None = None, + return_response: bool = False, ) -> ServiceResponse: - calls.append(ServiceCall(domain, service, service_data)) + calls.append( + ServiceCall(domain, service, service_data, context, return_response) + ) try: return await _original_async_call( domain, service, service_data, - **kwargs, + blocking, + context, + target, + return_response, ) - except ha.ServiceNotFound: + except ServiceNotFound: _LOGGER.debug("Ignoring unknown service call to %s.%s", domain, service) return None @@ -1698,7 +1868,7 @@ def snapshot(snapshot: SnapshotAssertion) -> SnapshotAssertion: @pytest.fixture -def disable_block_async_io() -> Generator[Any, Any, None]: +def disable_block_async_io() -> Generator[None]: """Fixture to disable the loop protection from block_async_io.""" yield calls = block_async_io._BLOCKED_CALLS.calls diff 
--git a/tests/helpers/test_aiohttp_client.py b/tests/helpers/test_aiohttp_client.py index 7dd34fd2c64..4feb03493e9 100644 --- a/tests/helpers/test_aiohttp_client.py +++ b/tests/helpers/test_aiohttp_client.py @@ -1,5 +1,6 @@ """Test the aiohttp client helper.""" +import socket from unittest.mock import Mock, patch import aiohttp @@ -16,9 +17,10 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL, + EVENT_HOMEASSISTANT_CLOSE, HTTP_BASIC_AUTHENTICATION, ) -from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant +from homeassistant.core import HomeAssistant import homeassistant.helpers.aiohttp_client as client from homeassistant.util.color import RGBColor @@ -82,7 +84,14 @@ async def test_get_clientsession_without_ssl(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("verify_ssl", "expected_family"), - [(True, 0), (False, 0), (True, 4), (False, 4), (True, 6), (False, 6)], + [ + (True, socket.AF_UNSPEC), + (False, socket.AF_UNSPEC), + (True, socket.AF_INET), + (False, socket.AF_INET), + (True, socket.AF_INET6), + (False, socket.AF_INET6), + ], ) async def test_get_clientsession( hass: HomeAssistant, verify_ssl: bool, expected_family: int diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index e6d637d1a99..ad571ac50cc 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -1,8 +1,10 @@ """Tests for the Area Registry.""" +from datetime import datetime, timedelta from functools import partial from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -11,6 +13,7 @@ from homeassistant.helpers import ( floor_registry as fr, label_registry as lr, ) +from homeassistant.util.dt import utcnow from tests.common import ANY, async_capture_events, flush_store @@ -24,7 +27,11 @@ async def test_list_areas(area_registry: ar.AreaRegistry) -> None: assert len(areas) == len(area_registry.areas) -async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) -> None: +async def test_create_area( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + area_registry: ar.AreaRegistry, +) -> None: """Make sure that we can create an area.""" update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) @@ -40,9 +47,13 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) name="mock", normalized_name=ANY, picture=None, + created_at=utcnow(), + modified_at=utcnow(), ) assert len(area_registry.areas) == 1 + freezer.tick(timedelta(minutes=5)) + await hass.async_block_till_done() assert len(update_events) == 1 @@ -52,14 +63,14 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) } # Create area with all parameters - area = area_registry.async_create( + area2 = area_registry.async_create( "mock 2", aliases={"alias_1", "alias_2"}, labels={"label1", "label2"}, picture="/image/example.png", ) - assert area == ar.AreaEntry( + assert area2 == ar.AreaEntry( aliases={"alias_1", "alias_2"}, floor_id=None, icon=None, @@ -68,15 +79,19 @@ async def test_create_area(hass: HomeAssistant, area_registry: ar.AreaRegistry) name="mock 2", normalized_name=ANY, picture="/image/example.png", + created_at=utcnow(), + modified_at=utcnow(), ) assert len(area_registry.areas) == 2 + assert area.created_at != area2.created_at + assert area.modified_at != area2.modified_at await hass.async_block_till_done() assert len(update_events) == 2 assert 
update_events[-1].data == { "action": "create", - "area_id": area.id, + "area_id": area2.id, } @@ -150,11 +165,18 @@ async def test_update_area( area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can read areas.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, ar.EVENT_AREA_REGISTRY_UPDATED) floor_registry.async_create("first") area = area_registry.async_create("mock") + assert area.modified_at == created_at + + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_area = area_registry.async_update( area.id, @@ -176,6 +198,8 @@ async def test_update_area( name="mock1", normalized_name=ANY, picture="/image/example.png", + created_at=created_at, + modified_at=modified_at, ) assert len(area_registry.areas) == 1 @@ -285,6 +309,8 @@ async def test_loading_area_from_storage( "labels": ["mock-label1", "mock-label2"], "name": "mock", "picture": "blah", + "created_at": utcnow().isoformat(), + "modified_at": utcnow().isoformat(), } ] }, @@ -329,6 +355,8 @@ async def test_migration_from_1_1( "labels": [], "name": "mock", "picture": None, + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", } ] }, diff --git a/tests/helpers/test_category_registry.py b/tests/helpers/test_category_registry.py index 1317750ebec..cad997fd50f 100644 --- a/tests/helpers/test_category_registry.py +++ b/tests/helpers/test_category_registry.py @@ -1,13 +1,16 @@ """Tests for the category registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import category_registry as cr +from homeassistant.util.dt import UTC from tests.common import async_capture_events, flush_store @@ -152,9 +155,13 @@ async def test_delete_non_existing_category( async def test_update_category( - hass: HomeAssistant, category_registry: cr.CategoryRegistry + hass: HomeAssistant, + category_registry: cr.CategoryRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update categories.""" + created = datetime(2024, 2, 14, 12, 0, 0, tzinfo=UTC) + freezer.move_to(created) update_events = async_capture_events(hass, cr.EVENT_CATEGORY_REGISTRY_UPDATED) category = category_registry.async_create( scope="automation", @@ -162,9 +169,16 @@ async def test_update_category( ) assert len(category_registry.categories["automation"]) == 1 - assert category.category_id - assert category.name == "Energy saving" - assert category.icon is None + assert category == cr.CategoryEntry( + category_id=category.category_id, + created_at=created, + modified_at=created, + name="Energy saving", + icon=None, + ) + + modified = datetime(2024, 3, 14, 12, 0, 0, tzinfo=UTC) + freezer.move_to(modified) updated_category = category_registry.async_update( scope="automation", @@ -174,9 +188,13 @@ async def test_update_category( ) assert updated_category != category - assert updated_category.category_id == category.category_id - assert updated_category.name == "ENERGY SAVING" - assert updated_category.icon == "mdi:leaf" + assert updated_category == cr.CategoryEntry( + category_id=category.category_id, + created_at=created, + modified_at=modified, + name="ENERGY SAVING", + icon="mdi:leaf", + ) assert 
len(category_registry.categories["automation"]) == 1 @@ -343,18 +361,25 @@ async def test_loading_categories_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any] ) -> None: """Test loading stored categories on start.""" + date_1 = datetime(2024, 2, 14, 12, 0, 0) + date_2 = datetime(2024, 2, 14, 12, 0, 0) hass_storage[cr.STORAGE_KEY] = { "version": cr.STORAGE_VERSION_MAJOR, + "minor_version": cr.STORAGE_VERSION_MINOR, "data": { "categories": { "automation": [ { "category_id": "uuid1", + "created_at": date_1.isoformat(), + "modified_at": date_1.isoformat(), "name": "Energy saving", "icon": "mdi:leaf", }, { "category_id": "uuid2", + "created_at": date_1.isoformat(), + "modified_at": date_2.isoformat(), "name": "Something else", "icon": None, }, @@ -362,6 +387,8 @@ async def test_loading_categories_from_storage( "zone": [ { "category_id": "uuid3", + "created_at": date_2.isoformat(), + "modified_at": date_2.isoformat(), "name": "Grocery stores", "icon": "mdi:store", }, @@ -380,21 +407,33 @@ async def test_loading_categories_from_storage( category1 = category_registry.async_get_category( scope="automation", category_id="uuid1" ) - assert category1.category_id == "uuid1" - assert category1.name == "Energy saving" - assert category1.icon == "mdi:leaf" + assert category1 == cr.CategoryEntry( + category_id="uuid1", + created_at=date_1, + modified_at=date_1, + name="Energy saving", + icon="mdi:leaf", + ) category2 = category_registry.async_get_category( scope="automation", category_id="uuid2" ) - assert category2.category_id == "uuid2" - assert category2.name == "Something else" - assert category2.icon is None + assert category2 == cr.CategoryEntry( + category_id="uuid2", + created_at=date_1, + modified_at=date_2, + name="Something else", + icon=None, + ) category3 = category_registry.async_get_category(scope="zone", category_id="uuid3") - assert category3.category_id == "uuid3" - assert category3.name == "Grocery stores" - assert category3.icon == "mdi:store" + assert category3 == cr.CategoryEntry( + category_id="uuid3", + created_at=date_2, + modified_at=date_2, + name="Grocery stores", + icon="mdi:store", + ) async def test_async_create_thread_safety( @@ -447,3 +486,83 @@ async def test_async_update_thread_safety( name="new name", ) ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[cr.STORAGE_KEY] = { + "version": 1, + "data": { + "categories": { + "automation": [ + { + "category_id": "uuid1", + "name": "Energy saving", + "icon": "mdi:leaf", + }, + { + "category_id": "uuid2", + "name": "Something else", + "icon": None, + }, + ], + "zone": [ + { + "category_id": "uuid3", + "name": "Grocery stores", + "icon": "mdi:store", + }, + ], + } + }, + } + + await cr.async_load(hass) + registry = cr.async_get(hass) + + # Test data was loaded + assert len(registry.categories) == 2 + assert len(registry.categories["automation"]) == 2 + assert len(registry.categories["zone"]) == 1 + + assert registry.async_get_category(scope="automation", category_id="uuid1") + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[cr.STORAGE_KEY] == { + "version": cr.STORAGE_VERSION_MAJOR, + "minor_version": cr.STORAGE_VERSION_MINOR, + "key": cr.STORAGE_KEY, + "data": { + "categories": { + "automation": [ + { + "category_id": "uuid1", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": 
"1970-01-01T00:00:00+00:00", + "name": "Energy saving", + "icon": "mdi:leaf", + }, + { + "category_id": "uuid2", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": "Something else", + "icon": None, + }, + ], + "zone": [ + { + "category_id": "uuid3", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + "name": "Grocery stores", + "icon": "mdi:store", + }, + ], + } + }, + } diff --git a/tests/helpers/test_config_entry_flow.py b/tests/helpers/test_config_entry_flow.py index 6a198b7a297..498e57d45a4 100644 --- a/tests/helpers/test_config_entry_flow.py +++ b/tests/helpers/test_config_entry_flow.py @@ -1,9 +1,9 @@ """Tests for the Config Entry Flow helper.""" +from collections.abc import Generator from unittest.mock import Mock, PropertyMock, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup from homeassistant.config import async_process_ha_core_config diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 132a0b41707..23919f3a6a3 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -1,5 +1,6 @@ """Tests for the Somfy config flow.""" +from collections.abc import Generator from http import HTTPStatus import logging import time @@ -8,7 +9,6 @@ from unittest.mock import patch import aiohttp import pytest -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, setup from homeassistant.core import HomeAssistant diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 6df29eefaff..cf72012a1f1 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -6,6 +6,7 @@ import enum import logging import os from socket import _GLOBAL_DEFAULT_TIMEOUT +from typing import Any from unittest.mock import Mock, patch import uuid @@ -193,12 +194,12 @@ def test_platform_config() -> None: def test_ensure_list() -> None: """Test ensure_list.""" schema = vol.Schema(cv.ensure_list) - assert [] == schema(None) - assert [1] == schema(1) - assert [1] == schema([1]) - assert ["1"] == schema("1") - assert ["1"] == schema(["1"]) - assert [{"1": "2"}] == schema({"1": "2"}) + assert schema(None) == [] + assert schema(1) == [1] + assert schema([1]) == [1] + assert schema("1") == ["1"] + assert schema(["1"]) == ["1"] + assert schema({"1": "2"}) == [{"1": "2"}] def test_entity_id() -> None: @@ -416,27 +417,9 @@ def test_service() -> None: schema("homeassistant.turn_on") -def test_service_schema(hass: HomeAssistant) -> None: - """Test service_schema validation.""" - options = ( - {}, - None, - { - "service": "homeassistant.turn_on", - "service_template": "homeassistant.turn_on", - }, - {"data": {"entity_id": "light.kitchen"}}, - {"service": "homeassistant.turn_on", "data": None}, - { - "service": "homeassistant.turn_on", - "data_template": {"brightness": "{{ no_end"}, - }, - ) - for value in options: - with pytest.raises(vol.MultipleInvalid): - cv.SERVICE_SCHEMA(value) - - options = ( +@pytest.mark.parametrize( + "config", + [ {"service": "homeassistant.turn_on"}, {"service": "homeassistant.turn_on", "entity_id": "light.kitchen"}, {"service": "light.turn_on", "entity_id": "all"}, @@ -450,14 +433,70 @@ def test_service_schema(hass: HomeAssistant) -> None: "alias": "turn on kitchen lights", }, {"service": "scene.turn_on", 
"metadata": {}}, - ) - for value in options: - cv.SERVICE_SCHEMA(value) + {"action": "homeassistant.turn_on"}, + {"action": "homeassistant.turn_on", "entity_id": "light.kitchen"}, + {"action": "light.turn_on", "entity_id": "all"}, + { + "action": "homeassistant.turn_on", + "entity_id": ["light.kitchen", "light.ceiling"], + }, + { + "action": "light.turn_on", + "entity_id": "all", + "alias": "turn on kitchen lights", + }, + {"action": "scene.turn_on", "metadata": {}}, + ], +) +def test_service_schema(hass: HomeAssistant, config: dict[str, Any]) -> None: + """Test service_schema validation.""" + validated = cv.SERVICE_SCHEMA(config) - # Check metadata is removed from the validated output - assert cv.SERVICE_SCHEMA({"service": "scene.turn_on", "metadata": {}}) == { - "service": "scene.turn_on" - } + # Ensure metadata is removed from the validated output + assert "metadata" not in validated + + # Ensure service is migrated to action + assert "service" not in validated + assert "action" in validated + assert validated["action"] == config.get("service", config["action"]) + + +@pytest.mark.parametrize( + "config", + [ + {}, + None, + {"data": {"entity_id": "light.kitchen"}}, + { + "service": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"service": "homeassistant.turn_on", "data": None}, + { + "service": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + { + "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", + }, + { + "action": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"action": "homeassistant.turn_on", "data": None}, + { + "action": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + ], +) +def test_invalid_service_schema( + hass: HomeAssistant, config: dict[str, Any] | None +) -> None: + """Test service_schema validation fails.""" + with pytest.raises(vol.MultipleInvalid): + cv.SERVICE_SCHEMA(config) def test_entity_service_schema() -> None: @@ -865,7 +904,7 @@ def schema(): @pytest.fixture -def version(monkeypatch): +def version(monkeypatch: pytest.MonkeyPatch) -> None: """Patch the version used for testing to 0.5.0.""" monkeypatch.setattr(homeassistant.const, "__version__", "0.5.0") @@ -965,7 +1004,7 @@ def test_deprecated_with_replacement_key( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} caplog.clear() assert len(caplog.records) == 0 @@ -1036,7 +1075,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} caplog.clear() assert len(caplog.records) == 0 @@ -1049,7 +1088,7 @@ def test_deprecated_with_replacement_key_and_default( test_data = {"venus": True} output = deprecated_schema(test_data.copy()) assert len(caplog.records) == 0 - assert {"venus": True, "jupiter": False} == output + assert output == {"venus": True, "jupiter": False} deprecated_schema_with_default = vol.All( vol.Schema( @@ -1068,7 +1107,7 @@ def test_deprecated_with_replacement_key_and_default( assert ( "The 'mars' option is deprecated, please replace it with 'jupiter'" ) in caplog.text - assert {"jupiter": True} == output + assert output == {"jupiter": True} def test_deprecated_cant_find_module() -> None: diff --git a/tests/helpers/test_device.py b/tests/helpers/test_device.py index 
72c602bec48..852d418da23 100644 --- a/tests/helpers/test_device.py +++ b/tests/helpers/test_device.py @@ -169,7 +169,7 @@ async def test_remove_stale_device_links_keep_entity_device( config_entry.entry_id ) - # After cleanup, only one device is expected to be linked to the configuration entry if at least source_entity_id_or_uuid or device_id was given, else zero + # After cleanup, only one device is expected to be linked to the config entry assert len(devices_config_entry) == 1 assert current_device in devices_config_entry @@ -220,7 +220,7 @@ async def test_remove_stale_devices_links_keep_current_device( config_entry.entry_id ) - # After cleanup, only one device is expected to be linked to the configuration entry + # After cleanup, only one device is expected to be linked to the config entry assert len(devices_config_entry) == 1 assert current_device in devices_config_entry diff --git a/tests/helpers/test_device_registry.py b/tests/helpers/test_device_registry.py index 3a525f00870..129c6b0d37c 100644 --- a/tests/helpers/test_device_registry.py +++ b/tests/helpers/test_device_registry.py @@ -2,11 +2,13 @@ from collections.abc import Iterable from contextlib import AbstractContextManager, nullcontext +from datetime import datetime from functools import partial import time from typing import Any from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory import pytest from yarl import URL @@ -19,6 +21,7 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, ) +from homeassistant.util.dt import utcnow from tests.common import ( MockConfigEntry, @@ -177,12 +180,15 @@ async def test_multiple_config_entries( @pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") async def test_loading_from_storage( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: """Test loading stored devices on start.""" + created_at = "2024-01-01T00:00:00+00:00" + modified_at = "2024-02-01T00:00:00+00:00" hass_storage[dr.STORAGE_KEY] = { "version": dr.STORAGE_VERSION_MAJOR, "minor_version": dr.STORAGE_VERSION_MINOR, @@ -193,6 +199,7 @@ async def test_loading_from_storage( "config_entries": [mock_config_entry.entry_id], "configuration_url": "https://example.com/config", "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": created_at, "disabled_by": dr.DeviceEntryDisabler.USER, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": "hw_version", @@ -201,6 +208,8 @@ async def test_loading_from_storage( "labels": {"label1", "label2"}, "manufacturer": "manufacturer", "model": "model", + "model_id": "model_id", + "modified_at": modified_at, "name_by_user": "Test Friendly Name", "name": "name", "primary_config_entry": mock_config_entry.entry_id, @@ -213,8 +222,10 @@ async def test_loading_from_storage( { "config_entries": [mock_config_entry.entry_id], "connections": [["Zigbee", "23.45.67.89.01"]], + "created_at": created_at, "id": "bcdefghijklmn", "identifiers": [["serial", "3456ABCDEF12"]], + "modified_at": modified_at, "orphaned_timestamp": None, } ], @@ -226,6 +237,16 @@ async def test_loading_from_storage( assert len(registry.devices) == 1 assert len(registry.deleted_devices) == 1 + assert registry.deleted_devices["bcdefghijklmn"] == dr.DeletedDeviceEntry( + config_entries={mock_config_entry.entry_id}, + connections={("Zigbee", "23.45.67.89.01")}, + created_at=datetime.fromisoformat(created_at), + id="bcdefghijklmn", + identifiers={("serial", "3456ABCDEF12")}, + 
modified_at=datetime.fromisoformat(modified_at), + orphaned_timestamp=None, + ) + entry = registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, connections={("Zigbee", "01.23.45.67.89")}, @@ -238,6 +259,7 @@ async def test_loading_from_storage( config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("Zigbee", "01.23.45.67.89")}, + created_at=datetime.fromisoformat(created_at), disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -246,6 +268,8 @@ async def test_loading_from_storage( labels={"label1", "label2"}, manufacturer="manufacturer", model="model", + model_id="model_id", + modified_at=datetime.fromisoformat(modified_at), name_by_user="Test Friendly Name", name="name", primary_config_entry=mock_config_entry.entry_id, @@ -268,10 +292,12 @@ async def test_loading_from_storage( assert entry == dr.DeviceEntry( config_entries={mock_config_entry.entry_id}, connections={("Zigbee", "23.45.67.89.01")}, + created_at=datetime.fromisoformat(created_at), id="bcdefghijklmn", identifiers={("serial", "3456ABCDEF12")}, manufacturer="manufacturer", model="model", + modified_at=utcnow(), primary_config_entry=mock_config_entry.entry_id, ) assert entry.id == "bcdefghijklmn" @@ -281,12 +307,13 @@ async def test_loading_from_storage( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_1_to_1_6( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_1_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.1 to 1.6.""" + """Test migration from version 1.1 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 1, @@ -365,6 +392,7 @@ async def test_migration_1_1_to_1_6( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -373,6 +401,8 @@ async def test_migration_1_1_to_1_6( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -385,6 +415,7 @@ async def test_migration_1_1_to_1_6( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -393,6 +424,8 @@ async def test_migration_1_1_to_1_6( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -405,8 +438,10 @@ async def test_migration_1_1_to_1_6( { "config_entries": ["123456"], "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "id": "deletedid", "identifiers": [["serial", "123456ABCDFF"]], + "modified_at": "1970-01-01T00:00:00+00:00", "orphaned_timestamp": None, } ], @@ -415,12 +450,13 @@ async def test_migration_1_1_to_1_6( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_2_to_1_6( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_2_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.2 to 1.6.""" + """Test migration from version 1.2 to 1.7.""" 
hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 2, @@ -438,6 +474,7 @@ async def test_migration_1_2_to_1_6( "identifiers": [["serial", "123456ABCDEF"]], "manufacturer": "manufacturer", "model": "model", + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "sw_version": "version", @@ -454,6 +491,7 @@ async def test_migration_1_2_to_1_6( "identifiers": [["serial", "mock-id-invalid-entry"]], "manufacturer": None, "model": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "sw_version": None, @@ -498,6 +536,7 @@ async def test_migration_1_2_to_1_6( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": None, @@ -506,6 +545,8 @@ async def test_migration_1_2_to_1_6( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -518,6 +559,7 @@ async def test_migration_1_2_to_1_6( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -526,6 +568,8 @@ async def test_migration_1_2_to_1_6( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -540,12 +584,13 @@ async def test_migration_1_2_to_1_6( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_3_to_1_6( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_3_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.3 to 1.6.""" + """Test migration from version 1.3 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 3, @@ -625,6 +670,7 @@ async def test_migration_1_3_to_1_6( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -633,6 +679,8 @@ async def test_migration_1_3_to_1_6( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -645,6 +693,7 @@ async def test_migration_1_3_to_1_6( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -653,6 +702,8 @@ async def test_migration_1_3_to_1_6( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name": None, "name_by_user": None, "primary_config_entry": None, @@ -667,12 +718,13 @@ async def test_migration_1_3_to_1_6( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_4_to_1_6( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_4_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.4 to 1.6.""" + """Test migration from version 1.4 to 1.7.""" 
hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 4, @@ -754,6 +806,7 @@ async def test_migration_1_4_to_1_6( "config_entries": [mock_config_entry.entry_id], "configuration_url": None, "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": "service", "hw_version": "hw_version", @@ -762,6 +815,8 @@ async def test_migration_1_4_to_1_6( "labels": [], "manufacturer": "manufacturer", "model": "model", + "model_id": None, + "modified_at": utcnow().isoformat(), "name": "name", "name_by_user": None, "primary_config_entry": mock_config_entry.entry_id, @@ -774,6 +829,7 @@ async def test_migration_1_4_to_1_6( "config_entries": [None], "configuration_url": None, "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": None, "hw_version": None, @@ -782,6 +838,8 @@ async def test_migration_1_4_to_1_6( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": None, @@ -796,12 +854,13 @@ async def test_migration_1_4_to_1_6( @pytest.mark.parametrize("load_registries", [False]) -async def test_migration_1_5_to_1_6( +@pytest.mark.usefixtures("freezer") +async def test_migration_1_5_to_1_7( hass: HomeAssistant, hass_storage: dict[str, Any], mock_config_entry: MockConfigEntry, ) -> None: - """Test migration from version 1.5 to 1.6.""" + """Test migration from version 1.5 to 1.7.""" hass_storage[dr.STORAGE_KEY] = { "version": 1, "minor_version": 5, @@ -878,6 +937,72 @@ async def test_migration_1_5_to_1_6( "version": dr.STORAGE_VERSION_MAJOR, "minor_version": dr.STORAGE_VERSION_MINOR, "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "model_id": None, + "modified_at": utcnow().isoformat(), + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + +@pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") +async def test_migration_1_6_to_1_8( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration from version 1.6 to 1.8.""" + hass_storage[dr.STORAGE_KEY] = { + "version": 1, + "minor_version": 6, + "key": dr.STORAGE_KEY, "data": { "devices": [ { @@ -914,6 +1039,228 @@ async def test_migration_1_5_to_1_6( "manufacturer": 
None, "model": None, "name_by_user": None, + "primary_config_entry": None, + "name": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + await dr.async_load(hass) + registry = dr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + ) + assert entry.id == "abcdefghijklm" + + # Update to trigger a store + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + sw_version="new_version", + ) + assert entry.id == "abcdefghijklm" + + # Check we store migrated data + await flush_store(registry._store) + + assert hass_storage[dr.STORAGE_KEY] == { + "version": dr.STORAGE_VERSION_MAJOR, + "minor_version": dr.STORAGE_VERSION_MINOR, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "name": None, + "primary_config_entry": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + +@pytest.mark.parametrize("load_registries", [False]) +@pytest.mark.usefixtures("freezer") +async def test_migration_1_7_to_1_8( + hass: HomeAssistant, + hass_storage: dict[str, Any], + mock_config_entry: MockConfigEntry, +) -> None: + """Test migration from version 1.7 to 1.8.""" + hass_storage[dr.STORAGE_KEY] = { + "version": 1, + "minor_version": 7, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "model_id": None, + "name": "name", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", 
"mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "name_by_user": None, + "primary_config_entry": None, + "name": None, + "serial_number": None, + "sw_version": None, + "via_device_id": None, + }, + ], + "deleted_devices": [], + }, + } + + await dr.async_load(hass) + registry = dr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + ) + assert entry.id == "abcdefghijklm" + + # Update to trigger a store + entry = registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + connections={("Zigbee", "01.23.45.67.89")}, + identifiers={("serial", "123456ABCDEF")}, + sw_version="new_version", + ) + assert entry.id == "abcdefghijklm" + + # Check we store migrated data + await flush_store(registry._store) + + assert hass_storage[dr.STORAGE_KEY] == { + "version": dr.STORAGE_VERSION_MAJOR, + "minor_version": dr.STORAGE_VERSION_MINOR, + "key": dr.STORAGE_KEY, + "data": { + "devices": [ + { + "area_id": None, + "config_entries": [mock_config_entry.entry_id], + "configuration_url": None, + "connections": [["Zigbee", "01.23.45.67.89"]], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": "service", + "hw_version": "hw_version", + "id": "abcdefghijklm", + "identifiers": [["serial", "123456ABCDEF"]], + "labels": ["blah"], + "manufacturer": "manufacturer", + "model": "model", + "name": "name", + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, + "primary_config_entry": mock_config_entry.entry_id, + "serial_number": None, + "sw_version": "new_version", + "via_device_id": None, + }, + { + "area_id": None, + "config_entries": [None], + "configuration_url": None, + "connections": [], + "created_at": "1970-01-01T00:00:00+00:00", + "disabled_by": None, + "entry_type": None, + "hw_version": None, + "id": "invalid-entry-type", + "identifiers": [["serial", "mock-id-invalid-entry"]], + "labels": ["blah"], + "manufacturer": None, + "model": None, + "model_id": None, + "modified_at": "1970-01-01T00:00:00+00:00", + "name_by_user": None, "name": None, "primary_config_entry": None, "serial_number": None, @@ -1399,8 +1746,11 @@ async def test_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Verify that we can update some attributes of a device.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, dr.EVENT_DEVICE_REGISTRY_UPDATED) entry = device_registry.async_get_or_create( config_entry_id=mock_config_entry.entry_id, @@ -1412,7 +1762,11 @@ async def test_update( assert not entry.area_id assert not entry.labels assert not entry.name_by_user + assert entry.created_at == created_at + assert entry.modified_at == created_at + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) with patch.object(device_registry, "async_schedule_save") as mock_save: updated_entry = device_registry.async_update_device( entry.id, @@ -1424,6 +1778,7 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", + model_id="Test Model Name", name_by_user="Test Friendly Name", name="name", new_connections=new_connections, @@ -1441,6 +1796,7 @@ async def test_update( 
config_entries={mock_config_entry.entry_id}, configuration_url="https://example.com/config", connections={("mac", "65:43:21:fe:dc:ba")}, + created_at=created_at, disabled_by=dr.DeviceEntryDisabler.USER, entry_type=dr.DeviceEntryType.SERVICE, hw_version="hw_version", @@ -1449,6 +1805,8 @@ async def test_update( labels={"label1", "label2"}, manufacturer="Test Producer", model="Test Model", + model_id="Test Model Name", + modified_at=modified_at, name_by_user="Test Friendly Name", name="name", serial_number="serial_no", @@ -1503,6 +1861,7 @@ async def test_update( "labels": set(), "manufacturer": None, "model": None, + "model_id": None, "name": None, "name_by_user": None, "serial_number": None, @@ -2466,6 +2825,7 @@ async def test_loading_invalid_configuration_url_from_storage( "config_entries": ["1234"], "configuration_url": "invalid", "connections": [], + "created_at": "2024-01-01T00:00:00+00:00", "disabled_by": None, "entry_type": dr.DeviceEntryType.SERVICE, "hw_version": None, @@ -2474,6 +2834,8 @@ async def test_loading_invalid_configuration_url_from_storage( "labels": [], "manufacturer": None, "model": None, + "model_id": None, + "modified_at": "2024-02-01T00:00:00+00:00", "name_by_user": None, "name": None, "primary_config_entry": "1234", diff --git a/tests/helpers/test_discovery_flow.py b/tests/helpers/test_discovery_flow.py index 9c2249ac17f..0fa315d684b 100644 --- a/tests/helpers/test_discovery_flow.py +++ b/tests/helpers/test_discovery_flow.py @@ -1,12 +1,13 @@ """Test the discovery flow helper.""" +from collections.abc import Generator from unittest.mock import AsyncMock, call, patch import pytest -from typing_extensions import Generator from homeassistant import config_entries -from homeassistant.core import EVENT_HOMEASSISTANT_STARTED, CoreState, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED +from homeassistant.core import CoreState, HomeAssistant from homeassistant.helpers import discovery_flow diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index f76b8555580..283a5b4fb37 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -22,15 +22,16 @@ from homeassistant.const import ( ATTR_FRIENDLY_NAME, STATE_UNAVAILABLE, STATE_UNKNOWN, + EntityCategory, ) from homeassistant.core import ( Context, HassJobType, HomeAssistant, - HomeAssistantError, ReleaseChannel, callback, ) +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.helpers.typing import UNDEFINED, UndefinedType @@ -922,13 +923,13 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: key="abc", entity_category="ignore_me" ) mock_entity1.entity_id = "hello.world" - mock_entity1._attr_entity_category = entity.EntityCategory.CONFIG + mock_entity1._attr_entity_category = EntityCategory.CONFIG assert mock_entity1.entity_category == "config" mock_entity2 = entity.Entity() mock_entity2.hass = hass mock_entity2.entity_description = entity.EntityDescription( - key="abc", entity_category=entity.EntityCategory.CONFIG + key="abc", entity_category=EntityCategory.CONFIG ) mock_entity2.entity_id = "hello.world" assert mock_entity2.entity_category == "config" @@ -937,8 +938,8 @@ async def test_entity_category_property(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("value", "expected"), [ - ("config", entity.EntityCategory.CONFIG), - ("diagnostic", 
entity.EntityCategory.DIAGNOSTIC), + ("config", EntityCategory.CONFIG), + ("diagnostic", EntityCategory.DIAGNOSTIC), ], ) def test_entity_category_schema(value, expected) -> None: @@ -946,7 +947,7 @@ def test_entity_category_schema(value, expected) -> None: schema = vol.Schema(entity.ENTITY_CATEGORIES_SCHEMA) result = schema(value) assert result == expected - assert isinstance(result, entity.EntityCategory) + assert isinstance(result, EntityCategory) @pytest.mark.parametrize("value", [None, "non_existing"]) diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 32ce740edb2..3f34305b39d 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -117,7 +117,7 @@ async def test_setup_does_discovery( await hass.async_block_till_done() assert mock_setup.called - assert ("platform_test", {}, {"msg": "discovery_info"}) == mock_setup.call_args[0] + assert mock_setup.call_args[0] == ("platform_test", {}, {"msg": "discovery_info"}) async def test_set_scan_interval_via_config(hass: HomeAssistant) -> None: @@ -191,9 +191,9 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert ["test_domain.test_1", "test_domain.test_3"] == sorted( + assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_1)) - ) + ) == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( "test", @@ -201,9 +201,9 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert ["test_domain.test_3"] == sorted( + assert sorted( ent.entity_id for ent in (await component.async_extract_from_service(call_2)) - ) + ) == ["test_domain.test_3"] async def test_platform_not_ready(hass: HomeAssistant) -> None: @@ -288,9 +288,9 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert ["test_domain.test_2"] == [ + assert [ ent.entity_id for ent in await component.async_extract_from_service(call) - ] + ] == ["test_domain.test_2"] async def test_extract_from_service_no_group_expand(hass: HomeAssistant) -> None: @@ -467,8 +467,11 @@ async def test_extract_all_omit_entity_id( call = ServiceCall("test", "service") - assert [] == sorted( - ent.entity_id for ent in await component.async_extract_from_service(call) + assert ( + sorted( + ent.entity_id for ent in await component.async_extract_from_service(call) + ) + == [] ) @@ -484,9 +487,9 @@ async def test_extract_all_use_match_all( call = ServiceCall("test", "service", {"entity_id": "all"}) - assert ["test_domain.test_1", "test_domain.test_2"] == sorted( + assert sorted( ent.entity_id for ent in await component.async_extract_from_service(call) - ) + ) == ["test_domain.test_1", "test_domain.test_2"] assert ( "Not passing an entity ID to a service to target all entities is deprecated" ) not in caplog.text diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 4e761a21e8c..75a41945a91 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -9,7 +9,7 @@ from unittest.mock import ANY, AsyncMock, Mock, patch import pytest -from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE +from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory from 
homeassistant.core import ( CoreState, HomeAssistant, @@ -26,12 +26,8 @@ from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, ) -from homeassistant.helpers.entity import ( - DeviceInfo, - Entity, - EntityCategory, - async_generate_entity_id, -) +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, async_generate_entity_id from homeassistant.helpers.entity_component import ( DEFAULT_SCAN_INTERVAL, EntityComponent, @@ -1426,6 +1422,7 @@ async def test_entity_hidden_by_integration( assert entry_hidden.hidden_by is er.RegistryEntryHider.INTEGRATION +@pytest.mark.usefixtures("freezer") async def test_entity_info_added_to_entity_registry( hass: HomeAssistant, entity_registry: er.EntityRegistry ) -> None: @@ -1454,11 +1451,13 @@ async def test_entity_info_added_to_entity_registry( "default", "test_domain", capabilities={"max": 100}, + created_at=dt_util.utcnow(), device_class=None, entity_category=EntityCategory.CONFIG, has_entity_name=True, icon=None, id=ANY, + modified_at=dt_util.utcnow(), name=None, original_device_class="mock-device-class", original_icon="nice:icon", diff --git a/tests/helpers/test_entity_registry.py b/tests/helpers/test_entity_registry.py index 4dc8d79be3f..9b1d68c7777 100644 --- a/tests/helpers/test_entity_registry.py +++ b/tests/helpers/test_entity_registry.py @@ -1,6 +1,6 @@ """Tests for the Entity Registry.""" -from datetime import timedelta +from datetime import datetime, timedelta from functools import partial from typing import Any from unittest.mock import patch @@ -19,8 +19,10 @@ from homeassistant.const import ( from homeassistant.core import CoreState, HomeAssistant, callback from homeassistant.exceptions import MaxLengthExceeded from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.util.dt import utc_from_timestamp from tests.common import ( + ANY, MockConfigEntry, async_capture_events, async_fire_time_changed, @@ -69,9 +71,14 @@ def test_get_or_create_suggested_object_id(entity_registry: er.EntityRegistry) - assert entry.entity_id == "light.beer" -def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: +def test_get_or_create_updates_data( + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: """Test that we update data in get_or_create.""" orig_config_entry = MockConfigEntry(domain="light") + created = datetime.fromisoformat("2024-02-14T12:00:00.0+00:00") + freezer.move_to(created) orig_entry = entity_registry.async_get_or_create( "light", @@ -100,6 +107,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: "hue", capabilities={"max": 100}, config_entry_id=orig_config_entry.entry_id, + created_at=created, device_class=None, device_id="mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, @@ -108,6 +116,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, icon=None, id=orig_entry.id, + modified_at=created, name=None, original_device_class="mock-device-class", original_icon="initial-original_icon", @@ -118,6 +127,8 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: ) new_config_entry = MockConfigEntry(domain="light") + modified = created + timedelta(minutes=5) + freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -146,6 +157,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> 
None: area_id=None, capabilities={"new-max": 150}, config_entry_id=new_config_entry.entry_id, + created_at=created, device_class=None, device_id="new-mock-dev-id", disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -154,6 +166,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, + modified_at=modified, name=None, original_device_class="new-mock-device-class", original_icon="updated-original_icon", @@ -164,6 +177,8 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: ) assert set(entity_registry.async_device_ids()) == {"new-mock-dev-id"} + modified = created + timedelta(minutes=5) + freezer.move_to(modified) new_entry = entity_registry.async_get_or_create( "light", @@ -192,6 +207,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: area_id=None, capabilities=None, config_entry_id=None, + created_at=created, device_class=None, device_id=None, disabled_by=er.RegistryEntryDisabler.HASS, # Should not be updated @@ -200,6 +216,7 @@ def test_get_or_create_updates_data(entity_registry: er.EntityRegistry) -> None: hidden_by=er.RegistryEntryHider.INTEGRATION, # Should not be updated icon=None, id=orig_entry.id, + modified_at=modified, name=None, original_device_class=None, original_icon=None, @@ -309,8 +326,12 @@ async def test_loading_saving_data( assert orig_entry1 == new_entry1 assert orig_entry2 == new_entry2 - assert orig_entry3 == new_entry3 - assert orig_entry4 == new_entry4 + + # By converting a deleted device to a active device, the modified_at will be updated + assert orig_entry3.modified_at < new_entry3.modified_at + assert attr.evolve(orig_entry3, modified_at=new_entry3.modified_at) == new_entry3 + assert orig_entry4.modified_at < new_entry4.modified_at + assert attr.evolve(orig_entry4, modified_at=new_entry4.modified_at) == new_entry4 assert new_entry2.area_id == "mock-area-id" assert new_entry2.categories == {"scope", "id"} @@ -422,6 +443,8 @@ async def test_filter_on_load( assert entry_with_name.name == "registry override" assert entry_without_name.name is None assert not entry_with_name.disabled + assert entry_with_name.created_at == utc_from_timestamp(0) + assert entry_with_name.modified_at == utc_from_timestamp(0) entry_disabled_hass = registry.async_get_or_create( "test", "super_platform", "disabled-hass" @@ -453,6 +476,7 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -463,6 +487,7 @@ async def test_load_bad_data( "icon": None, "id": "00001", "labels": [], + "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -481,6 +506,7 @@ async def test_load_bad_data( "capabilities": None, "categories": {}, "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "device_class": None, "device_id": None, "disabled_by": None, @@ -491,6 +517,7 @@ async def test_load_bad_data( "icon": None, "id": "00002", "labels": [], + "modified_at": "2024-02-14T12:00:00.900075+00:00", "name": None, "options": None, "original_device_class": None, @@ -507,16 +534,20 @@ async def test_load_bad_data( "deleted_entities": [ { "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test3", "id": "00003", + "modified_at": 
"2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": 234, # Should not load }, { "config_entry_id": None, + "created_at": "2024-02-14T12:00:00.900075+00:00", "entity_id": "test.test4", "id": "00004", + "modified_at": "2024-02-14T12:00:00.900075+00:00", "orphaned_timestamp": None, "platform": "super_platform", "unique_id": ["also", "not", "valid"], # Should not load @@ -695,6 +726,49 @@ async def test_migration_1_1(hass: HomeAssistant, hass_storage: dict[str, Any]) assert entry.device_class is None assert entry.original_device_class == "best_class" + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[er.STORAGE_KEY] == { + "version": er.STORAGE_VERSION_MAJOR, + "minor_version": er.STORAGE_VERSION_MINOR, + "key": er.STORAGE_KEY, + "data": { + "entities": [ + { + "aliases": [], + "area_id": None, + "capabilities": {}, + "categories": {}, + "config_entry_id": None, + "created_at": "1970-01-01T00:00:00+00:00", + "device_id": None, + "disabled_by": None, + "entity_category": None, + "entity_id": "test.entity", + "has_entity_name": False, + "hidden_by": None, + "icon": None, + "id": ANY, + "labels": [], + "modified_at": "1970-01-01T00:00:00+00:00", + "name": None, + "options": {}, + "original_device_class": "best_class", + "original_icon": None, + "original_name": None, + "platform": "super_platform", + "previous_unique_id": None, + "supported_features": 0, + "translation_key": None, + "unique_id": "very_unique", + "unit_of_measurement": None, + "device_class": None, + } + ], + "deleted_entities": [], + }, + } + @pytest.mark.parametrize("load_registries", [False]) async def test_migration_1_7(hass: HomeAssistant, hass_storage: dict[str, Any]) -> None: diff --git a/tests/helpers/test_floor_registry.py b/tests/helpers/test_floor_registry.py index 3b07563fd11..c39ac3c40b4 100644 --- a/tests/helpers/test_floor_registry.py +++ b/tests/helpers/test_floor_registry.py @@ -1,15 +1,18 @@ """Tests for the floor registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant from homeassistant.helpers import area_registry as ar, floor_registry as fr +from homeassistant.util.dt import utcnow -from tests.common import async_capture_events, flush_store +from tests.common import ANY, async_capture_events, flush_store async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: @@ -18,8 +21,10 @@ async def test_list_floors(floor_registry: fr.FloorRegistry) -> None: assert len(list(floors)) == len(floor_registry.floors) +@pytest.mark.usefixtures("freezer") async def test_create_floor( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, ) -> None: """Make sure that we can create floors.""" update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) @@ -30,11 +35,16 @@ async def test_create_floor( level=1, ) - assert floor.floor_id == "first_floor" - assert floor.name == "First floor" - assert floor.icon == "mdi:home-floor-1" - assert floor.aliases == {"first", "ground", "ground floor"} - assert floor.level == 1 + assert floor == fr.FloorEntry( + floor_id="first_floor", + name="First floor", + icon="mdi:home-floor-1", + aliases={"first", "ground", "ground floor"}, + level=1, + created_at=utcnow(), + modified_at=utcnow(), + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 @@ 
-116,18 +126,31 @@ async def test_delete_non_existing_floor(floor_registry: fr.FloorRegistry) -> No async def test_update_floor( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update floors.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) + update_events = async_capture_events(hass, fr.EVENT_FLOOR_REGISTRY_UPDATED) floor = floor_registry.async_create("First floor") + assert floor == fr.FloorEntry( + floor_id="first_floor", + name="First floor", + icon=None, + aliases=set(), + level=None, + created_at=created_at, + modified_at=created_at, + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 - assert floor.floor_id == "first_floor" - assert floor.name == "First floor" - assert floor.icon is None - assert floor.aliases == set() - assert floor.level is None + + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_floor = floor_registry.async_update( floor.floor_id, @@ -138,11 +161,16 @@ async def test_update_floor( ) assert updated_floor != floor - assert updated_floor.floor_id == "first_floor" - assert updated_floor.name == "Second floor" - assert updated_floor.icon == "mdi:home-floor-2" - assert updated_floor.aliases == {"ground", "downstairs"} - assert updated_floor.level == 2 + assert updated_floor == fr.FloorEntry( + floor_id="first_floor", + name="Second floor", + icon="mdi:home-floor-2", + aliases={"ground", "downstairs"}, + level=2, + created_at=created_at, + modified_at=modified_at, + normalized_name=ANY, + ) assert len(floor_registry.floors) == 1 @@ -236,15 +264,22 @@ async def test_update_floor_with_normalized_name_already_in_use( async def test_load_floors( - hass: HomeAssistant, floor_registry: fr.FloorRegistry + hass: HomeAssistant, + floor_registry: fr.FloorRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can load/save data correctly.""" + floor1_created = datetime.fromisoformat("2024-01-01T00:00:00+00:00") + freezer.move_to(floor1_created) floor1 = floor_registry.async_create( "First floor", icon="mdi:home-floor-1", aliases={"first", "ground"}, level=1, ) + + floor2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") + freezer.move_to(floor2_created) floor2 = floor_registry.async_create( "Second floor", icon="mdi:home-floor-2", @@ -262,25 +297,16 @@ async def test_load_floors( assert list(floor_registry.floors) == list(registry2.floors) floor1_registry2 = registry2.async_get_floor_by_name("First floor") - assert floor1_registry2.floor_id == floor1.floor_id - assert floor1_registry2.name == floor1.name - assert floor1_registry2.icon == floor1.icon - assert floor1_registry2.aliases == floor1.aliases - assert floor1_registry2.level == floor1.level - assert floor1_registry2.normalized_name == floor1.normalized_name + assert floor1_registry2 == floor1 floor2_registry2 = registry2.async_get_floor_by_name("Second floor") - assert floor2_registry2.floor_id == floor2.floor_id - assert floor2_registry2.name == floor2.name - assert floor2_registry2.icon == floor2.icon - assert floor2_registry2.aliases == floor2.aliases - assert floor2_registry2.level == floor2.level - assert floor2_registry2.normalized_name == floor2.normalized_name + assert floor2_registry2 == floor2 @pytest.mark.parametrize("load_registries", [False]) async def test_loading_floors_from_storage( - hass: HomeAssistant, hass_storage: 
dict[str, Any] + hass: HomeAssistant, + hass_storage: dict[str, Any], ) -> None: """Test loading stored floors on start.""" hass_storage[fr.STORAGE_KEY] = { @@ -392,3 +418,52 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(floor_registry.async_update, any_floor.floor_id, name="new name") ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[fr.STORAGE_KEY] = { + "version": 1, + "data": { + "floors": [ + { + "floor_id": "12345A", + "name": "mock", + "aliases": [], + "icon": None, + "level": None, + } + ] + }, + } + + await fr.async_load(hass) + registry = fr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_floor_by_name("mock") + assert entry.floor_id == "12345A" + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[fr.STORAGE_KEY] == { + "version": fr.STORAGE_VERSION_MAJOR, + "minor_version": fr.STORAGE_VERSION_MINOR, + "key": fr.STORAGE_KEY, + "data": { + "floors": [ + { + "aliases": [], + "icon": None, + "floor_id": "12345A", + "level": None, + "name": "mock", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + } + ] + }, + } diff --git a/tests/helpers/test_httpx_client.py b/tests/helpers/test_httpx_client.py index 60bdbe607e3..ccfccb3d698 100644 --- a/tests/helpers/test_httpx_client.py +++ b/tests/helpers/test_httpx_client.py @@ -5,7 +5,8 @@ from unittest.mock import Mock, patch import httpx import pytest -from homeassistant.core import EVENT_HOMEASSISTANT_CLOSE, HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant import homeassistant.helpers.httpx_client as client from tests.common import MockModule, extract_stack_to_frame, mock_integration diff --git a/tests/helpers/test_integration_platform.py b/tests/helpers/test_integration_platform.py index 81eb1f2fd38..497bae5fb88 100644 --- a/tests/helpers/test_integration_platform.py +++ b/tests/helpers/test_integration_platform.py @@ -7,12 +7,13 @@ from unittest.mock import Mock, patch import pytest from homeassistant import loader +from homeassistant.const import EVENT_COMPONENT_LOADED from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) -from homeassistant.setup import ATTR_COMPONENT, EVENT_COMPONENT_LOADED +from homeassistant.setup import ATTR_COMPONENT from tests.common import mock_platform diff --git a/tests/helpers/test_intent.py b/tests/helpers/test_intent.py index c592fc50c0a..ae8c2ed65d0 100644 --- a/tests/helpers/test_intent.py +++ b/tests/helpers/test_intent.py @@ -765,7 +765,7 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N ) # Still fails even if we provide the domain - with pytest.raises(intent.MatchFailedError): + with pytest.raises(intent.InvalidSlotInfo): await intent.async_handle( hass, "test", @@ -777,7 +777,10 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N async def test_service_handler_empty_strings(hass: HomeAssistant) -> None: """Test that passing empty strings for filters fails in ServiceIntentHandler.""" handler = intent.ServiceIntentHandler( - "TestType", "light", "turn_on", "Turned {} on" + "TestType", + "light", + "turn_on", + 
"Turned {} on", ) intent.async_register(hass, handler) @@ -814,3 +817,55 @@ async def test_service_handler_no_filter(hass: HomeAssistant) -> None: "test", "TestType", ) + + +async def test_service_handler_device_classes( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test that passing empty strings for filters fails in ServiceIntentHandler.""" + + # Register a fake service and a switch intent handler + call_done = asyncio.Event() + calls = [] + + # Register a service that takes 0.1 seconds to execute + async def mock_service(call): + """Mock service.""" + call_done.set() + calls.append(call) + + hass.services.async_register("switch", "turn_on", mock_service) + + handler = intent.ServiceIntentHandler( + "TestType", + "switch", + "turn_on", + "Turned {} on", + device_classes={switch.SwitchDeviceClass}, + ) + intent.async_register(hass, handler) + + # Create a switch enttiy and match by device class + hass.states.async_set( + "switch.bedroom", "off", attributes={"device_class": "outlet"} + ) + hass.states.async_set("switch.living_room", "off") + + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "outlet"}}, + ) + await call_done.wait() + assert [call.data.get("entity_id") for call in calls] == ["switch.bedroom"] + calls.clear() + + # Validate which device classes are allowed + with pytest.raises(intent.InvalidSlotInfo): + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "light"}}, + ) diff --git a/tests/helpers/test_json.py b/tests/helpers/test_json.py index 061faed6f93..123731de68d 100644 --- a/tests/helpers/test_json.py +++ b/tests/helpers/test_json.py @@ -13,6 +13,7 @@ from unittest.mock import Mock, patch import pytest from homeassistant.core import Event, HomeAssistant, State +from homeassistant.helpers import json as json_helper from homeassistant.helpers.json import ( ExtendedJSONEncoder, JSONEncoder as DefaultHASSJSONEncoder, @@ -25,9 +26,14 @@ from homeassistant.helpers.json import ( ) from homeassistant.util import dt as dt_util from homeassistant.util.color import RGBColor -from homeassistant.util.json import SerializationError, load_json +from homeassistant.util.json import ( + JSON_DECODE_EXCEPTIONS, + JSON_ENCODE_EXCEPTIONS, + SerializationError, + load_json, +) -from tests.common import json_round_trip +from tests.common import import_and_test_deprecated_constant, json_round_trip # Test data that can be saved as JSON TEST_JSON_A = {"a": 1, "B": "two"} @@ -335,3 +341,50 @@ def test_find_unserializable_data() -> None: BadData(), dump=partial(json.dumps, cls=MockJSONEncoder), ) == {"$(BadData).bla": bad_data} + + +def test_deprecated_json_loads(caplog: pytest.LogCaptureFixture) -> None: + """Test deprecated json_loads function. + + It was moved from helpers to util in #88099 + """ + json_helper.json_loads("{}") + assert ( + "json_loads is a deprecated function which will be removed in " + "HA Core 2025.8. 
Use homeassistant.util.json.json_loads instead" + ) in caplog.text + + +@pytest.mark.parametrize( + ("constant_name", "replacement_name", "replacement"), + [ + ( + "JSON_DECODE_EXCEPTIONS", + "homeassistant.util.json.JSON_DECODE_EXCEPTIONS", + JSON_DECODE_EXCEPTIONS, + ), + ( + "JSON_ENCODE_EXCEPTIONS", + "homeassistant.util.json.JSON_ENCODE_EXCEPTIONS", + JSON_ENCODE_EXCEPTIONS, + ), + ], +) +def test_deprecated_aliases( + caplog: pytest.LogCaptureFixture, + constant_name: str, + replacement_name: str, + replacement: Any, +) -> None: + """Test deprecated JSON_DECODE_EXCEPTIONS and JSON_ENCODE_EXCEPTIONS constants. + + They were moved from helpers to util in #88099 + """ + import_and_test_deprecated_constant( + caplog, + json_helper, + constant_name, + replacement_name, + replacement, + "2025.8", + ) diff --git a/tests/helpers/test_label_registry.py b/tests/helpers/test_label_registry.py index 445319a4b62..f466edad874 100644 --- a/tests/helpers/test_label_registry.py +++ b/tests/helpers/test_label_registry.py @@ -1,9 +1,11 @@ """Tests for the Label Registry.""" +from datetime import datetime from functools import partial import re from typing import Any +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.core import HomeAssistant @@ -12,8 +14,9 @@ from homeassistant.helpers import ( entity_registry as er, label_registry as lr, ) +from homeassistant.util.dt import utcnow -from tests.common import MockConfigEntry, async_capture_events, flush_store +from tests.common import ANY, MockConfigEntry, async_capture_events, flush_store async def test_list_labels(label_registry: lr.LabelRegistry) -> None: @@ -22,6 +25,7 @@ async def test_list_labels(label_registry: lr.LabelRegistry) -> None: assert len(list(labels)) == len(label_registry.labels) +@pytest.mark.usefixtures("freezer") async def test_create_label( hass: HomeAssistant, label_registry: lr.LabelRegistry ) -> None: @@ -34,11 +38,16 @@ async def test_create_label( description="This label is for testing", ) - assert label.label_id == "my_label" - assert label.name == "My Label" - assert label.color == "#FF0000" - assert label.icon == "mdi:test" - assert label.description == "This label is for testing" + assert label == lr.LabelEntry( + label_id="my_label", + name="My Label", + color="#FF0000", + icon="mdi:test", + description="This label is for testing", + created_at=utcnow(), + modified_at=utcnow(), + normalized_name=ANY, + ) assert len(label_registry.labels) == 1 @@ -119,19 +128,30 @@ async def test_delete_non_existing_label(label_registry: lr.LabelRegistry) -> No async def test_update_label( - hass: HomeAssistant, label_registry: lr.LabelRegistry + hass: HomeAssistant, + label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can update labels.""" + created_at = datetime.fromisoformat("2024-01-01T01:00:00+00:00") + freezer.move_to(created_at) update_events = async_capture_events(hass, lr.EVENT_LABEL_REGISTRY_UPDATED) label = label_registry.async_create("Mock") assert len(label_registry.labels) == 1 - assert label.label_id == "mock" - assert label.name == "Mock" - assert label.color is None - assert label.icon is None - assert label.description is None + assert label == lr.LabelEntry( + label_id="mock", + name="Mock", + color=None, + icon=None, + description=None, + created_at=created_at, + modified_at=created_at, + normalized_name=ANY, + ) + modified_at = datetime.fromisoformat("2024-02-01T01:00:00+00:00") + freezer.move_to(modified_at) updated_label = 
label_registry.async_update( label.label_id, name="Updated", @@ -141,12 +161,16 @@ async def test_update_label( ) assert updated_label != label - assert updated_label.label_id == "mock" - assert updated_label.name == "Updated" - assert updated_label.color == "#FFFFFF" - assert updated_label.icon == "mdi:update" - assert updated_label.description == "Updated description" - + assert updated_label == lr.LabelEntry( + label_id="mock", + name="Updated", + color="#FFFFFF", + icon="mdi:update", + description="Updated description", + created_at=created_at, + modified_at=modified_at, + normalized_name=ANY, + ) assert len(label_registry.labels) == 1 await hass.async_block_till_done() @@ -242,15 +266,21 @@ async def test_update_label_with_normalized_name_already_in_use( async def test_load_labels( - hass: HomeAssistant, label_registry: lr.LabelRegistry + hass: HomeAssistant, + label_registry: lr.LabelRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Make sure that we can load/save data correctly.""" + label1_created = datetime.fromisoformat("2024-01-01T00:00:00+00:00") + freezer.move_to(label1_created) label1 = label_registry.async_create( "Label One", color="#FF000", icon="mdi:one", description="This label is label one", ) + label2_created = datetime.fromisoformat("2024-02-01T00:00:00+00:00") + freezer.move_to(label2_created) label2 = label_registry.async_create( "Label Two", color="#000FF", @@ -268,19 +298,10 @@ async def test_load_labels( assert list(label_registry.labels) == list(registry2.labels) label1_registry2 = registry2.async_get_label_by_name("Label One") - assert label1_registry2.label_id == label1.label_id - assert label1_registry2.name == label1.name - assert label1_registry2.color == label1.color - assert label1_registry2.description == label1.description - assert label1_registry2.icon == label1.icon - assert label1_registry2.normalized_name == label1.normalized_name + assert label1_registry2 == label1 label2_registry2 = registry2.async_get_label_by_name("Label Two") - assert label2_registry2.name == label2.name - assert label2_registry2.color == label2.color - assert label2_registry2.description == label2.description - assert label2_registry2.icon == label2.icon - assert label2_registry2.normalized_name == label2.normalized_name + assert label2_registry2 == label2 @pytest.mark.parametrize("load_registries", [False]) @@ -298,6 +319,8 @@ async def test_loading_label_from_storage( "icon": "mdi:test", "label_id": "one", "name": "One", + "created_at": "2024-01-01T00:00:00+00:00", + "modified_at": "2024-02-01T00:00:00+00:00", } ] }, @@ -489,3 +512,52 @@ async def test_async_update_thread_safety( await hass.async_add_executor_job( partial(label_registry.async_update, any_label.label_id, name="new name") ) + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_1( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.1.""" + hass_storage[lr.STORAGE_KEY] = { + "version": 1, + "data": { + "labels": [ + { + "color": None, + "description": None, + "icon": None, + "label_id": "12345A", + "name": "mock", + } + ] + }, + } + + await lr.async_load(hass) + registry = lr.async_get(hass) + + # Test data was loaded + entry = registry.async_get_label_by_name("mock") + assert entry.label_id == "12345A" + + # Check we store migrated data + await flush_store(registry._store) + assert hass_storage[lr.STORAGE_KEY] == { + "version": lr.STORAGE_VERSION_MAJOR, + "minor_version": lr.STORAGE_VERSION_MINOR, + "key": 
lr.STORAGE_KEY, + "data": { + "labels": [ + { + "color": None, + "description": None, + "icon": None, + "label_id": "12345A", + "name": "mock", + "created_at": "1970-01-01T00:00:00+00:00", + "modified_at": "1970-01-01T00:00:00+00:00", + } + ] + }, + } diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index e1f55942d10..4d14abb9819 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -22,7 +22,6 @@ from homeassistant.helpers import ( selector, ) from homeassistant.setup import async_setup_component -from homeassistant.util import yaml from tests.common import MockConfigEntry @@ -412,7 +411,9 @@ async def test_assist_api_prompt( ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) - def create_entity(device: dr.DeviceEntry, write_state=True) -> None: + def create_entity( + device: dr.DeviceEntry, write_state=True, aliases: set[str] | None = None + ) -> None: """Create an entity for a device and track entity_id.""" entity = entity_registry.async_get_or_create( "light", @@ -422,6 +423,8 @@ async def test_assist_api_prompt( original_name=str(device.name or "Unnamed Device"), suggested_object_id=str(device.name or "unnamed_device"), ) + if aliases: + entity_registry.async_update_entity(entity.entity_id, aliases=aliases) if write_state: entity.write_unavailable_state(hass) @@ -433,7 +436,8 @@ async def test_assist_api_prompt( manufacturer="Test Manufacturer", model="Test Model", suggested_area="Test Area", - ) + ), + aliases={"my test light"}, ) for i in range(3): create_entity( @@ -506,74 +510,58 @@ async def test_assist_api_prompt( suggested_area="Test Area 2", ) ) - - exposed_entities = llm._get_exposed_entities(hass, llm_context.assistant) - assert exposed_entities == { - "light.1": { - "areas": "Test Area 2", - "names": "1", - "state": "unavailable", - }, - entry1.entity_id: { - "names": "Kitchen", - "state": "on", - "attributes": {"temperature": "0.9", "humidity": "65"}, - }, - entry2.entity_id: { - "areas": "Test Area, Alternative name", - "names": "Living Room", - "state": "on", - }, - "light.test_device": { - "areas": "Test Area, Alternative name", - "names": "Test Device", - "state": "unavailable", - }, - "light.test_device_2": { - "areas": "Test Area 2", - "names": "Test Device 2", - "state": "unavailable", - }, - "light.test_device_3": { - "areas": "Test Area 2", - "names": "Test Device 3", - "state": "unavailable", - }, - "light.test_device_4": { - "areas": "Test Area 2", - "names": "Test Device 4", - "state": "unavailable", - }, - "light.test_service": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - "light.test_service_2": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - "light.test_service_3": { - "areas": "Test Area, Alternative name", - "names": "Test Service", - "state": "unavailable", - }, - "light.unnamed_device": { - "areas": "Test Area 2", - "names": "Unnamed Device", - "state": "unavailable", - }, - } - exposed_entities_prompt = ( - "An overview of the areas and the devices in this smart home:\n" - + yaml.dump(exposed_entities) - ) + exposed_entities_prompt = """An overview of the areas and the devices in this smart home: +- names: Kitchen + domain: light + state: 'on' + attributes: + temperature: '0.9' + humidity: '65' +- names: Living Room + domain: light + state: 'on' + areas: Test Area, Alternative name +- names: Test Device, my test light + domain: light + state: unavailable + areas: Test Area, 
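Note: the migration test above expects pre-existing label records to gain created_at/modified_at keys defaulting to the Unix epoch. A generic sketch of that kind of backfill follows; the function name and version check are illustrative, the real logic lives in the registry's storage class.

```python
# Backfill timestamp fields when loading old storage data (illustrative only).
EPOCH = "1970-01-01T00:00:00+00:00"


def migrate_labels(old_minor_version: int, data: dict) -> dict:
    """Add created_at/modified_at to records written before the new schema."""
    if old_minor_version < 2:  # assumed cut-off for the timestamp fields
        for label in data["labels"]:
            label.setdefault("created_at", EPOCH)
            label.setdefault("modified_at", EPOCH)
    return data


migrated = migrate_labels(1, {"labels": [{"label_id": "12345A", "name": "mock"}]})
assert migrated["labels"][0]["created_at"] == EPOCH
```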
Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Service + domain: light + state: unavailable + areas: Test Area, Alternative name +- names: Test Device 2 + domain: light + state: unavailable + areas: Test Area 2 +- names: Test Device 3 + domain: light + state: unavailable + areas: Test Area 2 +- names: Test Device 4 + domain: light + state: unavailable + areas: Test Area 2 +- names: Unnamed Device + domain: light + state: unavailable + areas: Test Area 2 +- names: '1' + domain: light + state: unavailable + areas: Test Area 2 +""" first_part_prompt = ( "When controlling Home Assistant always call the intent tools. " "Use HassTurnOn to lock and HassTurnOff to unlock a lock. " - "When controlling a device, prefer passing just its name and its domain " - "(what comes before the dot in its entity id). " + "When controlling a device, prefer passing just name and domain. " "When controlling an area, prefer passing just area name and domain." ) no_timer_prompt = "This device is not able to start timers." @@ -633,6 +621,7 @@ async def test_assist_api_prompt( async def test_script_tool( hass: HomeAssistant, + entity_registry: er.EntityRegistry, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, ) -> None: @@ -676,6 +665,10 @@ async def test_script_tool( ) async_expose_entity(hass, "conversation", "script.test_script", True) + entity_registry.async_update_entity( + "script.test_script", name="script name", aliases={"script alias"} + ) + area = area_registry.async_create("Living room") floor = floor_registry.async_create("2") @@ -688,7 +681,10 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a test script" + assert ( + tool.description + == "This is a test script. Aliases: ['script name', 'script alias']" + ) schema = { vol.Required("beer", description="Number of beers"): cv.string, vol.Optional("wine"): selector.NumberSelector({"min": 0, "max": 3}), @@ -701,7 +697,10 @@ async def test_script_tool( assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a test script", vol.Schema(schema)) + "test_script": ( + "This is a test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } tool_input = llm.ToolInput( @@ -771,12 +770,18 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a new test script" + assert ( + tool.description + == "This is a new test script. Aliases: ['script name', 'script alias']" + ) schema = {vol.Required("beer", description="Number of beers"): cv.string} assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a new test script", vol.Schema(schema)) + "test_script": ( + "This is a new test script. 
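Note: the expected prompt above is block-style YAML listing one mapping per exposed entity. A comparable overview can be rendered with PyYAML as sketched here; this is only an illustration, the production code may build the string differently.

```python
# Render an entity overview as block-style YAML, preserving key order.
import yaml

exposed = [
    {"names": "Kitchen", "domain": "light", "state": "on",
     "attributes": {"temperature": "0.9", "humidity": "65"}},
    {"names": "Living Room", "domain": "light", "state": "on",
     "areas": "Test Area, Alternative name"},
]

prompt = (
    "An overview of the areas and the devices in this smart home:\n"
    + yaml.dump(exposed, sort_keys=False, default_flow_style=False)
)
print(prompt)
```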
Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } @@ -859,13 +864,22 @@ async def test_selector_serializer( assert selector_serializer( selector.ColorTempSelector({"min_mireds": 100, "max_mireds": 1000}) ) == {"type": "number", "minimum": 100, "maximum": 1000} + assert selector_serializer(selector.ConditionSelector()) == { + "type": "array", + "items": {"nullable": True, "type": "string"}, + } assert selector_serializer(selector.ConfigEntrySelector()) == {"type": "string"} assert selector_serializer(selector.ConstantSelector({"value": "test"})) == { - "enum": ["test"] + "type": "string", + "enum": ["test"], + } + assert selector_serializer(selector.ConstantSelector({"value": 1})) == { + "type": "integer", + "enum": [1], } - assert selector_serializer(selector.ConstantSelector({"value": 1})) == {"enum": [1]} assert selector_serializer(selector.ConstantSelector({"value": True})) == { - "enum": [True] + "type": "boolean", + "enum": [True], } assert selector_serializer(selector.QrCodeSelector({"data": "test"})) == { "type": "string" @@ -893,6 +907,17 @@ async def test_selector_serializer( "type": "array", "items": {"type": "string"}, } + assert selector_serializer(selector.DurationSelector()) == { + "type": "object", + "properties": { + "days": {"type": "number"}, + "hours": {"type": "number"}, + "minutes": {"type": "number"}, + "seconds": {"type": "number"}, + "milliseconds": {"type": "number"}, + }, + "required": [], + } assert selector_serializer(selector.EntitySelector()) == { "type": "string", "format": "entity_id", @@ -946,7 +971,10 @@ async def test_selector_serializer( "minimum": 30, "maximum": 100, } - assert selector_serializer(selector.ObjectSelector()) == {"type": "object"} + assert selector_serializer(selector.ObjectSelector()) == { + "type": "object", + "additionalProperties": True, + } assert selector_serializer( selector.SelectSelector( { @@ -968,6 +996,48 @@ async def test_selector_serializer( assert selector_serializer( selector.StateSelector({"entity_id": "sensor.test"}) ) == {"type": "string"} + target_schema = selector_serializer(selector.TargetSelector()) + target_schema["properties"]["entity_id"]["anyOf"][0][ + "enum" + ].sort() # Order is not deterministic + assert target_schema == { + "type": "object", + "properties": { + "area_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "device_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "entity_id": { + "anyOf": [ + {"type": "string", "enum": ["all", "none"], "format": "lower"}, + {"type": "string", "nullable": True}, + {"type": "array", "items": {"type": "string"}}, + ] + }, + "floor_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "label_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + }, + "required": [], + } + assert selector_serializer(selector.TemplateSelector()) == { "type": "string", "format": "jinja2", diff --git a/tests/helpers/test_recorder.py b/tests/helpers/test_recorder.py index 94f30d812bc..8fb8450bcb8 100644 --- a/tests/helpers/test_recorder.py +++ b/tests/helpers/test_recorder.py @@ -18,18 +18,25 @@ async def test_async_migration_in_progress( ): assert recorder.async_migration_in_progress(hass) is False - # The recorder is not loaded 
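Note: the selector assertions above show selectors being serialized into JSON-schema-style dicts. A stand-alone sketch of the shape expected for a duration selector (not the actual selector_serializer implementation):

```python
# A duration selector becomes an object with one numeric property per time unit.
def serialize_duration_selector() -> dict:
    units = ("days", "hours", "minutes", "seconds", "milliseconds")
    return {
        "type": "object",
        "properties": {unit: {"type": "number"} for unit in units},
        "required": [],
    }


assert serialize_duration_selector()["properties"]["seconds"] == {"type": "number"}
```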
- with patch( - "homeassistant.components.recorder.util.async_migration_in_progress", - return_value=True, - ): - assert recorder.async_migration_in_progress(hass) is False - - await async_setup_recorder_instance(hass) - - # The recorder is now loaded with patch( "homeassistant.components.recorder.util.async_migration_in_progress", return_value=True, ): assert recorder.async_migration_in_progress(hass) is True + + +async def test_async_migration_is_live( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test async_migration_in_progress wraps the recorder.""" + with patch( + "homeassistant.components.recorder.util.async_migration_is_live", + return_value=False, + ): + assert recorder.async_migration_is_live(hass) is False + + with patch( + "homeassistant.components.recorder.util.async_migration_is_live", + return_value=True, + ): + assert recorder.async_migration_is_live(hass) is True diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 08c196a04d3..1bc33140124 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -249,7 +249,7 @@ async def test_calling_service_basic( alias = "service step" sequence = cv.SCRIPT_SCHEMA( - {"alias": alias, "service": "test.script", "data": {"hello": "world"}} + {"alias": alias, "action": "test.script", "data": {"hello": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -352,13 +352,13 @@ async def test_calling_service_response_data( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", # Store the result of the service call as a variable "response_variable": "my_response", }, { "alias": "service step2", - "service": "test.script", + "action": "test.script", "data_template": { # Result of previous service call "key": "{{ my_response.data }}" @@ -441,7 +441,7 @@ async def test_service_response_data_errors( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", **params, }, ] @@ -458,7 +458,7 @@ async def test_data_template_with_templated_key(hass: HomeAssistant) -> None: calls = async_mock_service(hass, "test", "script") sequence = cv.SCRIPT_SCHEMA( - {"service": "test.script", "data_template": {"{{ hello_var }}": "world"}} + {"action": "test.script", "data_template": {"{{ hello_var }}": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -525,11 +525,11 @@ async def test_multiple_runs_no_wait(hass: HomeAssistant) -> None: sequence = cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire1 }}", "listen": "{{ listen1 }}"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire2 }}", "listen": "{{ listen2 }}"}, }, ] @@ -605,7 +605,7 @@ async def test_stop_no_wait(hass: HomeAssistant, count) -> None: hass.services.async_register("test", "script", async_simulate_long_service) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script( hass, sequence, @@ -3894,7 +3894,7 @@ async def test_parallel_error( sequence = cv.SCRIPT_SCHEMA( { "parallel": [ - {"service": "epic.failure"}, + {"action": "epic.failure"}, ] } ) @@ -3906,10 +3906,10 @@ async def test_parallel_error( assert len(events) == 0 expected_trace = { - "0": [{"error": "Service epic.failure not found"}], + "0": [{"error": "Action epic.failure not found"}], 
"0/parallel/0/sequence/0": [ { - "error": "Service epic.failure not found", + "error": "Action epic.failure not found", "result": { "params": { "domain": "epic", @@ -3946,7 +3946,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: await async_setup_component(hass, "homeassistant", {}) event = "test_event" events = async_capture_events(hass, event) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(exceptions.ServiceNotFound): @@ -3958,7 +3958,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: expected_trace = { "0": [ { - "error": "Service test.script not found", + "error": "Action test.script not found", "result": { "params": { "domain": "test", @@ -3980,7 +3980,7 @@ async def test_propagate_error_invalid_service_data(hass: HomeAssistant) -> None events = async_capture_events(hass, event) calls = async_mock_service(hass, "test", "script", vol.Schema({"text": str})) sequence = cv.SCRIPT_SCHEMA( - [{"service": "test.script", "data": {"text": 1}}, {"event": event}] + [{"action": "test.script", "data": {"text": 1}}, {"event": event}] ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -4022,7 +4022,7 @@ async def test_propagate_error_service_exception(hass: HomeAssistant) -> None: hass.services.async_register("test", "script", record_call) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(ValueError): @@ -4057,35 +4057,35 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": { "label_id": ["label_service_list_1", "label_service_list_2"] }, }, { - "service": "test.script", + "action": "test.script", "data": {"label_id": "{{ 'label_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"label_id": "label_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"label_id": "label_in_data_template"}, }, - {"service": "test.script", "data": {"without": "label_id"}}, + {"action": "test.script", "data": {"without": "label_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_1_seq"}, } ], @@ -4094,7 +4094,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_2_seq"}, } ], @@ -4102,7 +4102,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_default_seq"}, } ], @@ -4113,13 +4113,13 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_else"}, } ], @@ 
-4127,7 +4127,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_parallel"}, } ], @@ -4161,33 +4161,33 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": ["floor_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "{{ 'floor_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"floor_id": "floor_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"floor_id": "floor_in_data_template"}, }, - {"service": "test.script", "data": {"without": "floor_id"}}, + {"action": "test.script", "data": {"without": "floor_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_choice_1_seq"}, } ], @@ -4196,7 +4196,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_choice_2_seq"}, } ], @@ -4204,7 +4204,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_default_seq"}, } ], @@ -4215,13 +4215,13 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_else"}, } ], @@ -4229,7 +4229,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_parallel"}, } ], @@ -4262,33 +4262,33 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": ["area_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": "{{ 'area_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"area_id": "area_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"area_id": "area_in_data_template"}, }, - {"service": "test.script", "data": {"without": "area_id"}}, + {"action": "test.script", "data": {"without": "area_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_1_seq"}, } ], @@ -4297,7 +4297,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_2_seq"}, } ], @@ -4305,7 +4305,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_default_seq"}, } ], @@ -4316,13 +4316,13 @@ async def 
test_referenced_areas(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_else"}, } ], @@ -4330,7 +4330,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_parallel"}, } ], @@ -4364,27 +4364,27 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": ["light.service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "{{ 'light.service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "entity_id": "light.direct_entity_referenced", }, { - "service": "test.script", + "action": "test.script", "target": {"entity_id": "light.entity_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "light.entity_in_data_template"}, }, { @@ -4392,7 +4392,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "entity_id": "sensor.condition", "state": "100", }, - {"service": "test.script", "data": {"without": "entity_id"}}, + {"action": "test.script", "data": {"without": "entity_id"}}, {"scene": "scene.hello"}, { "choose": [ @@ -4400,7 +4400,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "conditions": "{{ states.light.choice_1_cond == 'on' }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_1_seq"}, } ], @@ -4413,7 +4413,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_2_seq"}, } ], @@ -4421,7 +4421,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.default_seq"}, } ], @@ -4432,13 +4432,13 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_else"}, } ], @@ -4446,7 +4446,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.parallel"}, } ], @@ -4491,19 +4491,19 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "domain": "switch", }, { - "service": "test.script", + "action": "test.script", "data": {"device_id": "data-string-id"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"device_id": "data-template-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": "target-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": ["target-list-id-1", "target-list-id-2"]}, }, { @@ -4515,7 +4515,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ), "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": 
"choice-1-seq-device-target" }, @@ -4530,7 +4530,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": "choice-2-seq-device-target" }, @@ -4540,7 +4540,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "default-device-target"}, } ], @@ -4549,13 +4549,13 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-else"}, } ], @@ -4563,7 +4563,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "parallel-device"}, } ], @@ -5104,7 +5104,7 @@ async def test_set_variable( sequence = cv.SCRIPT_SCHEMA( [ {"alias": alias, "variables": {"variable": "value"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5143,9 +5143,9 @@ async def test_set_redefines_variable( sequence = cv.SCRIPT_SCHEMA( [ {"variables": {"variable": "1"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, {"variables": {"variable": "{{ variable | int + 1 }}"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5214,7 +5214,7 @@ async def test_validate_action_config( } configs = { - cv.SCRIPT_ACTION_CALL_SERVICE: {"service": "light.turn_on"}, + cv.SCRIPT_ACTION_CALL_SERVICE: {"action": "light.turn_on"}, cv.SCRIPT_ACTION_DELAY: {"delay": 5}, cv.SCRIPT_ACTION_WAIT_TEMPLATE: { "wait_template": "{{ states.light.kitchen.state == 'on' }}" @@ -5349,7 +5349,7 @@ async def test_embedded_wait_for_trigger_in_automation(hass: HomeAssistant) -> N } ] }, - {"service": "test.script"}, + {"action": "test.script"}, ], } }, @@ -5704,12 +5704,12 @@ async def test_continue_on_error(hass: HomeAssistant) -> None: {"event": "test_event"}, { "continue_on_error": True, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, { "continue_on_error": False, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, ] @@ -5786,7 +5786,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "service.not_found", + "action": "service.not_found", }, ] ) @@ -5799,7 +5799,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: { "0": [ { - "error": "Service service.not_found not found", + "error": "Action service.not_found not found", "result": { "params": { "domain": "service", @@ -5834,7 +5834,7 @@ async def test_continue_on_error_unknown_error(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "some.service", + "action": "some.service", }, ] ) @@ -5884,7 +5884,7 @@ async def test_disabled_actions( { "alias": "Hello", "enabled": enabled_value, - "service": "broken.service", + "action": "broken.service", }, { "alias": 
"World", @@ -6255,7 +6255,7 @@ async def test_disallowed_recursion( context = Context() calls = 0 alias = "event step" - sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_2"}) + sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_2"}) script1_obj = script.Script( hass, sequence1, @@ -6265,7 +6265,7 @@ async def test_disallowed_recursion( running_description="test script1", ) - sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_3"}) + sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_3"}) script2_obj = script.Script( hass, sequence2, @@ -6275,7 +6275,7 @@ async def test_disallowed_recursion( running_description="test script2", ) - sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_1"}) + sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_1"}) script3_obj = script.Script( hass, sequence3, @@ -6315,3 +6315,43 @@ async def test_disallowed_recursion( "- test_domain2.Test Name2\n" "- test_domain3.Test Name3" ) in caplog.text + + +async def test_calling_service_backwards_compatible( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the calling of a service with the service instead of the action key.""" + context = Context() + calls = async_mock_service(hass, "test", "script") + + alias = "service step" + sequence = cv.SCRIPT_SCHEMA( + {"alias": alias, "service": "test.script", "data": {"hello": "{{ 'world' }}"}} + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + await script_obj.async_run(context=context) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].context is context + assert calls[0].data.get("hello") == "world" + assert f"Executing step {alias}" in caplog.text + + assert_action_trace( + { + "0": [ + { + "result": { + "params": { + "domain": "test", + "service": "script", + "service_data": {"hello": "world"}, + "target": {}, + }, + "running_script": False, + } + } + ], + } + ) diff --git a/tests/helpers/test_script_variables.py b/tests/helpers/test_script_variables.py index ca942acdf66..3675c857279 100644 --- a/tests/helpers/test_script_variables.py +++ b/tests/helpers/test_script_variables.py @@ -3,7 +3,8 @@ import pytest from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, template +from homeassistant.exceptions import TemplateError +from homeassistant.helpers import config_validation as cv async def test_static_vars() -> None: @@ -110,5 +111,5 @@ async def test_template_vars_run_args_no_default(hass: HomeAssistant) -> None: async def test_template_vars_error(hass: HomeAssistant) -> None: """Test template vars.""" var = cv.SCRIPT_VARIABLES_SCHEMA({"hello": "{{ canont.work }}"}) - with pytest.raises(template.TemplateError): + with pytest.raises(TemplateError): var.async_render(hass, None) diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index e93ec3b8c22..de8c3555831 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -739,12 +739,13 @@ def test_attribute_selector_schema( ( {"seconds": 10}, {"days": 10}, # Days is allowed also if `enable_day` is not set + {"milliseconds": 500}, ), (None, {}), ), ( - {"enable_day": True}, - ({"seconds": 10}, {"days": 10}), + {"enable_day": True, "enable_millisecond": True}, + ({"seconds": 10}, {"days": 10}, {"milliseconds": 500}), (None, {}), ), ( diff --git a/tests/helpers/test_service.py 
b/tests/helpers/test_service.py index 9c5cda67725..81cc189e1af 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -405,7 +405,7 @@ async def test_service_call(hass: HomeAssistant) -> None: """Test service call with templating.""" calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data": { "hello": "{{ 'goodbye' }}", @@ -435,7 +435,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": { "area_id": ["area-42", "{{ 'area-51' }}"], "device_id": ["abcdef", "{{ 'fedcba' }}"], @@ -455,7 +455,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": "{{ var_target }}", } @@ -542,19 +542,19 @@ async def test_split_entity_string(hass: HomeAssistant) -> None: await service.async_call_from_config( hass, { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) await hass.async_block_till_done() - assert ["hello.world", "sensor.beer"] == calls[-1].data.get("entity_id") + assert calls[-1].data.get("entity_id") == ["hello.world", "sensor.beer"] async def test_not_mutate_input(hass: HomeAssistant) -> None: """Test for immutable input.""" async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, @@ -581,7 +581,7 @@ async def test_fail_silently_if_no_service(mock_log, hass: HomeAssistant) -> Non await service.async_call_from_config(hass, {}) assert mock_log.call_count == 2 - await service.async_call_from_config(hass, {"service": "invalid"}) + await service.async_call_from_config(hass, {"action": "invalid"}) assert mock_log.call_count == 3 @@ -597,7 +597,7 @@ async def test_service_call_entry_id( assert entry.entity_id == "hello.world" config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "target": {"entity_id": entry.id}, } @@ -613,7 +613,7 @@ async def test_service_call_all_none(hass: HomeAssistant, target) -> None: calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "target": {"entity_id": target}, } @@ -1792,10 +1792,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non call_1 = ServiceCall("test", "service", data={"entity_id": ENTITY_MATCH_ALL}) - assert ["test_domain.test_1", "test_domain.test_3"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_1)) - ] + ] == ["test_domain.test_1", "test_domain.test_3"] call_2 = ServiceCall( "test", @@ -1803,10 +1803,10 @@ async def test_extract_from_service_available_device(hass: HomeAssistant) -> Non data={"entity_id": ["test_domain.test_3", "test_domain.test_4"]}, ) - assert ["test_domain.test_3"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call_2)) - ] + ] == ["test_domain.test_3"] assert ( await service.async_extract_entities( @@ -1830,10 +1830,10 @@ async def 
test_extract_from_service_empty_if_no_entity_id(hass: HomeAssistant) - ] call = ServiceCall("test", "service") - assert [] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] + ] == [] async def test_extract_from_service_filter_out_non_existing_entities( @@ -1851,10 +1851,10 @@ async def test_extract_from_service_filter_out_non_existing_entities( {"entity_id": ["test_domain.test_2", "test_domain.non_exist"]}, ) - assert ["test_domain.test_2"] == [ + assert [ ent.entity_id for ent in (await service.async_extract_entities(hass, entities, call)) - ] + ] == ["test_domain.test_2"] async def test_extract_from_service_area_id( diff --git a/tests/helpers/test_update_coordinator.py b/tests/helpers/test_update_coordinator.py index 8633bf862a5..d450d924f1f 100644 --- a/tests/helpers/test_update_coordinator.py +++ b/tests/helpers/test_update_coordinator.py @@ -13,7 +13,11 @@ import requests from homeassistant import config_entries from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import CoreState, HomeAssistant, callback -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ( + ConfigEntryAuthFailed, + ConfigEntryError, + ConfigEntryNotReady, +) from homeassistant.helpers import update_coordinator from homeassistant.util.dt import utcnow @@ -525,11 +529,19 @@ async def test_stop_refresh_on_ha_stop( @pytest.mark.parametrize( "err_msg", - KNOWN_ERRORS, + [ + *KNOWN_ERRORS, + (Exception(), Exception, "Unknown exception"), + ], +) +@pytest.mark.parametrize( + "method", + ["update_method", "setup_method"], ) async def test_async_config_entry_first_refresh_failure( err_msg: tuple[Exception, type[Exception], str], crd: update_coordinator.DataUpdateCoordinator[int], + method: str, caplog: pytest.LogCaptureFixture, ) -> None: """Test async_config_entry_first_refresh raises ConfigEntryNotReady on failure. @@ -538,7 +550,7 @@ async def test_async_config_entry_first_refresh_failure( will be caught by config_entries.async_setup which will log it with a decreasing level of logging once the first message is logged. """ - crd.update_method = AsyncMock(side_effect=err_msg[0]) + setattr(crd, method, AsyncMock(side_effect=err_msg[0])) with pytest.raises(ConfigEntryNotReady): await crd.async_config_entry_first_refresh() @@ -548,13 +560,49 @@ async def test_async_config_entry_first_refresh_failure( assert err_msg[2] not in caplog.text +@pytest.mark.parametrize( + "err_msg", + [ + (ConfigEntryError(), ConfigEntryError, "Config entry error"), + (ConfigEntryAuthFailed(), ConfigEntryAuthFailed, "Config entry error"), + ], +) +@pytest.mark.parametrize( + "method", + ["update_method", "setup_method"], +) +async def test_async_config_entry_first_refresh_failure_passed_through( + err_msg: tuple[Exception, type[Exception], str], + crd: update_coordinator.DataUpdateCoordinator[int], + method: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test async_config_entry_first_refresh passes through ConfigEntryError & ConfigEntryAuthFailed. + + Verify we do not log the exception since it + will be caught by config_entries.async_setup which will log it with + a decreasing level of logging once the first message is logged. 
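Note: the two coordinator tests around here pin down how the first refresh treats failures: most exceptions are converted into a "not ready" error, while auth and config errors propagate unchanged. A generic sketch of that control flow; the exception names below are stand-ins, not the real homeassistant.exceptions classes.

```python
class NotReady(Exception):
    """Stand-in for the 'config entry not ready' error."""


class AuthFailed(Exception):
    """Stand-in for an authentication failure."""


class ConfigError(Exception):
    """Stand-in for a permanent config entry error."""


async def first_refresh(setup, refresh) -> None:
    """Run setup and the first refresh, translating unexpected failures."""
    try:
        await setup()
        await refresh()
    except (AuthFailed, ConfigError):
        raise  # passed through to config entry setup unchanged
    except Exception as err:
        raise NotReady(str(err)) from err  # everything else becomes "not ready"
```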
+ """ + setattr(crd, method, AsyncMock(side_effect=err_msg[0])) + + with pytest.raises(err_msg[1]): + await crd.async_config_entry_first_refresh() + + assert crd.last_update_success is False + assert isinstance(crd.last_exception, err_msg[1]) + assert err_msg[2] not in caplog.text + + async def test_async_config_entry_first_refresh_success( crd: update_coordinator.DataUpdateCoordinator[int], caplog: pytest.LogCaptureFixture ) -> None: """Test first refresh successfully.""" + + crd.setup_method = AsyncMock() await crd.async_config_entry_first_refresh() assert crd.last_update_success is True + crd.setup_method.assert_called_once() async def test_not_schedule_refresh_if_system_option_disable_polling( diff --git a/tests/patch_recorder.py b/tests/patch_recorder.py new file mode 100644 index 00000000000..4993e84fc30 --- /dev/null +++ b/tests/patch_recorder.py @@ -0,0 +1,27 @@ +"""Patch recorder related functions.""" + +from __future__ import annotations + +from contextlib import contextmanager +import sys + +# Patch recorder util session scope +from homeassistant.helpers import recorder as recorder_helper # noqa: E402 + +# Make sure homeassistant.components.recorder.util is not already imported +assert "homeassistant.components.recorder.util" not in sys.modules + +real_session_scope = recorder_helper.session_scope + + +@contextmanager +def _session_scope_wrapper(*args, **kwargs): + """Make session_scope patchable. + + This function will be imported by recorder modules. + """ + with real_session_scope(*args, **kwargs) as ses: + yield ses + + +recorder_helper.session_scope = _session_scope_wrapper diff --git a/tests/patch_time.py b/tests/patch_time.py index a93d3c8ec4f..362296ab8b2 100644 --- a/tests/patch_time.py +++ b/tests/patch_time.py @@ -5,9 +5,7 @@ from __future__ import annotations import datetime import time -from homeassistant import runner, util -from homeassistant.helpers import event as event_helper -from homeassistant.util import dt as dt_util +# Do not add any Home Assistant import here def _utcnow() -> datetime.datetime: @@ -20,10 +18,21 @@ def _monotonic() -> float: return time.monotonic() -# Replace partial functions which are not found by freezegun +# Before importing any other Home Assistant functionality, import and replace +# partial dt_util.utcnow with a regular function which can be found by freezegun +from homeassistant import util # noqa: E402 +from homeassistant.util import dt as dt_util # noqa: E402 + dt_util.utcnow = _utcnow # type: ignore[assignment] -event_helper.time_tracker_utcnow = _utcnow # type: ignore[assignment] util.utcnow = _utcnow # type: ignore[assignment] + +# Import other Home Assistant functionality which we need to patch +from homeassistant import runner # noqa: E402 +from homeassistant.helpers import event as event_helper # noqa: E402 + +# Replace partial functions which are not found by freezegun +event_helper.time_tracker_utcnow = _utcnow # type: ignore[assignment] + # Replace bound methods which are not found by freezegun runner.monotonic = _monotonic # type: ignore[assignment] diff --git a/tests/pylint/test_enforce_type_hints.py b/tests/pylint/test_enforce_type_hints.py index 5b1c494568d..b1692d1d60d 100644 --- a/tests/pylint/test_enforce_type_hints.py +++ b/tests/pylint/test_enforce_type_hints.py @@ -55,6 +55,7 @@ def test_regex_get_module_platform( ("list[dict[str, Any]]", 1, ("list", "dict[str, Any]")), ("tuple[bytes | None, str | None]", 2, ("tuple", "bytes | None", "str | None")), ("Callable[[], TestServer]", 2, ("Callable", "[]", 
"TestServer")), + ("pytest.CaptureFixture[str]", 1, ("pytest.CaptureFixture", "str")), ], ) def test_regex_x_of_y_i( @@ -1264,6 +1265,7 @@ def test_pytest_fixture(linter: UnittestLinter, type_hint_checker: BaseChecker) def sample_fixture( #@ hass: HomeAssistant, caplog: pytest.LogCaptureFixture, + capsys: pytest.CaptureFixture[str], aiohttp_server: Callable[[], TestServer], unused_tcp_port_factory: Callable[[], int], enable_custom_integrations: None, diff --git a/tests/scripts/test_auth.py b/tests/scripts/test_auth.py index f497751a4d7..002807f08a5 100644 --- a/tests/scripts/test_auth.py +++ b/tests/scripts/test_auth.py @@ -1,6 +1,7 @@ """Test the auth script to manage local users.""" from asyncio import AbstractEventLoop +from collections.abc import Generator import logging from typing import Any from unittest.mock import Mock, patch @@ -15,7 +16,7 @@ from tests.common import register_auth_provider @pytest.fixture(autouse=True) -def reset_log_level(): +def reset_log_level() -> Generator[None]: """Reset log level after each test case.""" logger = logging.getLogger("homeassistant.core") orig_level = logger.level @@ -24,7 +25,7 @@ def reset_log_level(): @pytest.fixture -def provider(hass): +def provider(hass: HomeAssistant) -> hass_auth.HassAuthProvider: """Home Assistant auth provider.""" provider = hass.loop.run_until_complete( register_auth_provider(hass, {"type": "homeassistant"}) @@ -33,7 +34,11 @@ def provider(hass): return provider -async def test_list_user(hass: HomeAssistant, provider, capsys) -> None: +async def test_list_user( + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], +) -> None: """Test we can list users.""" data = provider.data data.add_auth("test-user", "test-pass") @@ -47,7 +52,10 @@ async def test_list_user(hass: HomeAssistant, provider, capsys) -> None: async def test_add_user( - hass: HomeAssistant, provider, capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test we can add a user.""" data = provider.data @@ -64,7 +72,11 @@ async def test_add_user( data.validate_login("paulus", "test-pass") -async def test_validate_login(hass: HomeAssistant, provider, capsys) -> None: +async def test_validate_login( + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], +) -> None: """Test we can validate a user login.""" data = provider.data data.add_auth("test-user", "test-pass") @@ -89,7 +101,10 @@ async def test_validate_login(hass: HomeAssistant, provider, capsys) -> None: async def test_change_password( - hass: HomeAssistant, provider, capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test we can change a password.""" data = provider.data @@ -108,7 +123,10 @@ async def test_change_password( async def test_change_password_invalid_user( - hass: HomeAssistant, provider, capsys, hass_storage: dict[str, Any] + hass: HomeAssistant, + provider: hass_auth.HassAuthProvider, + capsys: pytest.CaptureFixture[str], + hass_storage: dict[str, Any], ) -> None: """Test changing password of non-existing user.""" data = provider.data diff --git a/tests/snapshots/test_config_entries.ambr b/tests/snapshots/test_config_entries.ambr index bfb583ba8db..136749dfb14 100644 --- a/tests/snapshots/test_config_entries.ambr +++ 
b/tests/snapshots/test_config_entries.ambr @@ -1,12 +1,14 @@ # serializer version: 1 # name: test_as_dict dict({ + 'created_at': '2024-02-14T12:00:00+00:00', 'data': dict({ }), 'disabled_by': None, 'domain': 'test', 'entry_id': 'mock-entry', 'minor_version': 1, + 'modified_at': '2024-02-14T12:00:00+00:00', 'options': dict({ }), 'pref_disable_new_entities': False, diff --git a/tests/syrupy.py b/tests/syrupy.py index 52bd5756798..0bdbcf99e2b 100644 --- a/tests/syrupy.py +++ b/tests/syrupy.py @@ -12,13 +12,7 @@ import attr import attrs from syrupy.extensions.amber import AmberDataSerializer, AmberSnapshotExtension from syrupy.location import PyTestLocation -from syrupy.types import ( - PropertyFilter, - PropertyMatcher, - PropertyPath, - SerializableData, - SerializedData, -) +from syrupy.types import PropertyFilter, PropertyMatcher, PropertyPath, SerializableData import voluptuous as vol import voluptuous_serialize @@ -90,7 +84,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): matcher: PropertyMatcher | None = None, path: PropertyPath = (), visited: set[Any] | None = None, - ) -> SerializedData: + ) -> str: """Pre-process data before serializing. This allows us to handle specific cases for Home Assistant data structures. @@ -111,7 +105,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serializable_data = voluptuous_serialize.convert(data) elif isinstance(data, ConfigEntry): serializable_data = cls._serializable_config_entry(data) - elif dataclasses.is_dataclass(data): + elif dataclasses.is_dataclass(type(data)): serializable_data = dataclasses.asdict(data) elif isinstance(data, IntFlag): # The repr of an enum.IntFlag has changed between Python 3.10 and 3.11 @@ -120,7 +114,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): else: serializable_data = data with suppress(TypeError): - if attr.has(data): + if attr.has(type(data)): serializable_data = attrs.asdict(data) return super()._serialize( @@ -136,14 +130,15 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): @classmethod def _serializable_area_registry_entry(cls, data: ar.AreaEntry) -> SerializableData: """Prepare a Home Assistant area registry entry for serialization.""" - serialized = AreaRegistryEntrySnapshot(attrs.asdict(data) | {"id": ANY}) + serialized = AreaRegistryEntrySnapshot(dataclasses.asdict(data) | {"id": ANY}) serialized.pop("_json_repr") return serialized @classmethod def _serializable_config_entry(cls, data: ConfigEntry) -> SerializableData: """Prepare a Home Assistant config entry for serialization.""" - return ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) + entry = ConfigEntrySnapshot(data.as_dict() | {"entry_id": ANY}) + return cls._remove_created_and_modified_at(entry) @classmethod def _serializable_device_registry_entry( @@ -161,7 +156,16 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): serialized["via_device_id"] = ANY if serialized["primary_config_entry"] is not None: serialized["primary_config_entry"] = ANY - return serialized + return cls._remove_created_and_modified_at(serialized) + + @classmethod + def _remove_created_and_modified_at( + cls, data: SerializableData + ) -> SerializableData: + """Remove created_at and modified_at from the data.""" + data.pop("created_at", None) + data.pop("modified_at", None) + return data @classmethod def _serializable_entity_registry_entry( @@ -178,7 +182,7 @@ class HomeAssistantSnapshotSerializer(AmberDataSerializer): } ) serialized.pop("categories") - return serialized + return 
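Note: the serializer change above (checking is_dataclass(type(data)) and attr.has(type(data))) narrows the branch to instances, since dataclasses.asdict() only accepts instances. A minimal demonstration of the difference:

```python
import dataclasses


@dataclasses.dataclass
class Point:
    x: int = 0
    y: int = 0


assert dataclasses.is_dataclass(Point)            # True for the class itself
assert dataclasses.is_dataclass(Point())          # ...and for instances
assert not dataclasses.is_dataclass(type(Point))  # type(Point) is `type`
assert dataclasses.is_dataclass(type(Point()))    # instance -> its dataclass

# Checking type(data) therefore avoids calling dataclasses.asdict() on a bare
# dataclass class passed in as snapshot data.
```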
cls._remove_created_and_modified_at(serialized) @classmethod def _serializable_flow_result(cls, data: FlowResult) -> SerializableData: diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index ae77fbee217..ef4f9df60f6 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -44,7 +44,7 @@ async def test_protect_loop_debugger_sleep(caplog: pytest.LogCaptureFixture) -> return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 assert "Detected blocking call inside the event loop" not in caplog.text @@ -71,7 +71,7 @@ async def test_protect_loop_sleep() -> None: return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 async def test_protect_loop_sleep_get_current_frame_raises() -> None: @@ -97,7 +97,7 @@ async def test_protect_loop_sleep_get_current_frame_raises() -> None: return_value=frames, ), ): - time.sleep(0) + time.sleep(0) # noqa: ASYNC251 async def test_protect_loop_importlib_import_module_non_integration( @@ -211,7 +211,7 @@ async def test_protect_loop_open(caplog: pytest.LogCaptureFixture) -> None: block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/proc/does_not_exist", encoding="utf8"), + open("/proc/does_not_exist", encoding="utf8"), # noqa: ASYNC230 ): pass assert "Detected blocking call to open with args" not in caplog.text @@ -223,7 +223,7 @@ async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), + open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 ): pass @@ -253,7 +253,7 @@ async def test_protect_open_path(path: Any, caplog: pytest.LogCaptureFixture) -> """Test opening a file by path in the event loop logs.""" with patch.object(block_async_io, "_IN_TESTS", False): block_async_io.enable() - with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): + with contextlib.suppress(FileNotFoundError), open(path, encoding="utf8"): # noqa: ASYNC230 pass assert "Detected blocking call to open with args" in caplog.text @@ -336,7 +336,7 @@ async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> block_async_io.enable() with ( contextlib.suppress(FileNotFoundError), - open("/config/data_not_exist", encoding="utf8"), + open("/config/data_not_exist", encoding="utf8"), # noqa: ASYNC230 ): pass diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 56599a15d34..278bfc631fd 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -1,7 +1,7 @@ """Test the bootstrapping.""" import asyncio -from collections.abc import Iterable +from collections.abc import Generator, Iterable import contextlib import glob import logging @@ -11,19 +11,21 @@ from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest -from typing_extensions import Generator from homeassistant import bootstrap, loader, runner import homeassistant.config as config_util from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEBUG, SIGNAL_BOOTSTRAP_INTEGRATIONS +from homeassistant.const import ( + BASE_PLATFORMS, + CONF_DEBUG, + SIGNAL_BOOTSTRAP_INTEGRATIONS, +) from homeassistant.core import CoreState, HomeAssistant, async_get_hass, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.translation import async_translations_loaded from 
homeassistant.helpers.typing import ConfigType from homeassistant.loader import Integration -from homeassistant.setup import BASE_PLATFORMS from .common import ( MockConfigEntry, @@ -434,9 +436,6 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: MockModule( domain="recorder", async_setup=gen_domain_setup("recorder"), - partial_manifest={ - "after_dependencies": ["http"], - }, ), ) @@ -1327,6 +1326,34 @@ async def test_bootstrap_dependencies( ) +@pytest.mark.parametrize("load_registries", [False]) +async def test_bootstrap_dependency_not_found( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test setup when an integration has missing dependencies.""" + mock_integration( + hass, + MockModule("good_integration", dependencies=[]), + ) + # Simulate an integration with missing dependencies. While a core integration + # can't have missing dependencies thanks to checks by hassfest, there's no such + # guarantee for custom integrations. + mock_integration( + hass, + MockModule("bad_integration", dependencies=["hahaha_crash_and_burn"]), + ) + + assert await bootstrap.async_from_config_dict( + {"good_integration": {}, "bad_integration": {}}, hass + ) + + assert "good_integration" in hass.config.components + assert "bad_integration" not in hass.config.components + + assert "Unable to resolve dependencies for bad_integration" in caplog.text + + async def test_pre_import_no_requirements(hass: HomeAssistant) -> None: """Test pre-imported and do not have any requirements.""" pre_imports = [ diff --git a/tests/test_config.py b/tests/test_config.py index 7f94317afea..c7039cabe8b 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,17 +2,18 @@ import asyncio from collections import OrderedDict +from collections.abc import Generator import contextlib import copy import logging import os +from pathlib import Path from typing import Any from unittest import mock from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator import voluptuous as vol from voluptuous import Invalid, MultipleInvalid import yaml @@ -28,15 +29,15 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, + CONF_PACKAGES, __version__, ) from homeassistant.core import ( - DOMAIN as HA_DOMAIN, + DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, - HomeAssistantError, ) -from homeassistant.exceptions import ConfigValidationError +from homeassistant.exceptions import ConfigValidationError, HomeAssistantError from homeassistant.helpers import ( check_config, config_validation as cv, @@ -412,11 +413,10 @@ async def test_ensure_config_exists_creates_config(hass: HomeAssistant) -> None: async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> None: """Test that calling ensure_config_exists uses existing config.""" - create_file(YAML_PATH) + await hass.async_add_executor_job(create_file, YAML_PATH) await config_util.async_ensure_config_exists(hass) - with open(YAML_PATH, encoding="utf8") as fp: - content = fp.read() + content = await hass.async_add_executor_job(Path(YAML_PATH).read_text) # File created with create_file are empty assert content == "" @@ -424,12 +424,11 @@ async def test_ensure_config_exists_uses_existing_config(hass: HomeAssistant) -> async def test_ensure_existing_files_is_not_overwritten(hass: HomeAssistant) -> None: """Test that calling async_create_default_config does not overwrite existing files.""" - 
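Note: the config tests above stop calling create_file()/open() directly in the event loop and hand the blocking file I/O to an executor. A generic asyncio equivalent of that pattern (hass.async_add_executor_job wraps the same idea):

```python
import asyncio
from pathlib import Path


async def read_config(path: str) -> str:
    """Read a file without blocking the event loop."""
    loop = asyncio.get_running_loop()
    # Path.read_text blocks, so run it in the default thread pool executor.
    return await loop.run_in_executor(None, Path(path).read_text)
```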
create_file(SECRET_PATH) + await hass.async_add_executor_job(create_file, SECRET_PATH) await config_util.async_create_default_config(hass) - with open(SECRET_PATH, encoding="utf8") as fp: - content = fp.read() + content = await hass.async_add_executor_job(Path(SECRET_PATH).read_text) # File created with create_file are empty assert content == "" @@ -490,9 +489,10 @@ def test_load_yaml_config_preserves_key_order() -> None: fp.write("hello: 2\n") fp.write("world: 1\n") - assert [("hello", 2), ("world", 1)] == list( - config_util.load_yaml_config_file(YAML_PATH).items() - ) + assert list(config_util.load_yaml_config_file(YAML_PATH).items()) == [ + ("hello", 2), + ("world", 1), + ] async def test_create_default_config_returns_none_if_write_error( @@ -1070,10 +1070,8 @@ async def test_check_ha_config_file_wrong(mock_check, hass: HomeAssistant) -> No "hass_config", [ { - HA_DOMAIN: { - config_util.CONF_PACKAGES: { - "pack_dict": {"input_boolean": {"ib1": None}} - } + HOMEASSISTANT_DOMAIN: { + CONF_PACKAGES: {"pack_dict": {"input_boolean": {"ib1": None}}} }, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, @@ -1088,7 +1086,7 @@ async def test_async_hass_config_yaml_merge( conf = await config_util.async_hass_config_yaml(hass) assert merge_log_err.call_count == 0 - assert conf[HA_DOMAIN].get(config_util.CONF_PACKAGES) is not None + assert conf[HOMEASSISTANT_DOMAIN].get(CONF_PACKAGES) is not None assert len(conf) == 3 assert len(conf["input_boolean"]) == 2 assert len(conf["light"]) == 1 @@ -1116,7 +1114,7 @@ async def test_merge(merge_log_err: MagicMock, hass: HomeAssistant) -> None: }, } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "light": {"platform": "test"}, "automation": [], @@ -1143,7 +1141,7 @@ async def test_merge_try_falsy(merge_log_err: MagicMock, hass: HomeAssistant) -> "pack_list2": {"light": OrderedDict()}, } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "automation": {"do": "something"}, "light": {"some": "light"}, } @@ -1166,7 +1164,7 @@ async def test_merge_new(merge_log_err: MagicMock, hass: HomeAssistant) -> None: "api": {}, }, } - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert merge_log_err.call_count == 0 @@ -1186,7 +1184,7 @@ async def test_merge_type_mismatch( "pack_2": {"light": {"ib1": None}}, # light gets merged - ensure_list } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_boolean": {"ib2": None}, "input_select": [{"ib2": None}], "light": [{"platform": "two"}], @@ -1204,13 +1202,13 @@ async def test_merge_once_only_keys( ) -> None: """Test if we have a merge for a comp that may occur only once. 
Keys.""" packages = {"pack_2": {"api": None}} - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, "api": None} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": None} await config_util.merge_packages_config(hass, config, packages) assert config["api"] == OrderedDict() packages = {"pack_2": {"api": {"key_3": 3}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"key_1": 1, "key_2": 2}, } await config_util.merge_packages_config(hass, config, packages) @@ -1219,7 +1217,7 @@ async def test_merge_once_only_keys( # Duplicate keys error packages = {"pack_2": {"api": {"key": 2}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"key": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -1234,7 +1232,7 @@ async def test_merge_once_only_lists(hass: HomeAssistant) -> None: } } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"list_1": ["item_1"]}, } await config_util.merge_packages_config(hass, config, packages) @@ -1257,7 +1255,7 @@ async def test_merge_once_only_dictionaries(hass: HomeAssistant) -> None: } } config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "api": {"dict_1": {"key_1": 1, "dict_1.1": {"key_1.1": 1.1}}}, } await config_util.merge_packages_config(hass, config, packages) @@ -1293,7 +1291,7 @@ async def test_merge_duplicate_keys( """Test if keys in dicts are duplicates.""" packages = {"pack_1": {"input_select": {"ib1": None}}} config = { - HA_DOMAIN: {config_util.CONF_PACKAGES: packages}, + HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}, "input_select": {"ib1": 1}, } await config_util.merge_packages_config(hass, config, packages) @@ -1451,7 +1449,7 @@ async def test_merge_split_component_definition(hass: HomeAssistant) -> None: "pack_1": {"light one": {"l1": None}}, "pack_2": {"light two": {"l2": None}, "light three": {"l3": None}}, } - config = {HA_DOMAIN: {config_util.CONF_PACKAGES: packages}} + config = {HOMEASSISTANT_DOMAIN: {CONF_PACKAGES: packages}} await config_util.merge_packages_config(hass, config, packages) assert len(config) == 4 @@ -2340,7 +2338,7 @@ async def test_packages_schema_validation_error( ] assert error_records == snapshot - assert len(config[HA_DOMAIN][config_util.CONF_PACKAGES]) == 0 + assert len(config[HOMEASSISTANT_DOMAIN][CONF_PACKAGES]) == 0 def test_extract_domain_configs() -> None: diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index cba7ad8f215..2a5dff5c14a 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -3,16 +3,17 @@ from __future__ import annotations import asyncio +from collections.abc import Generator from datetime import timedelta from functools import cached_property import logging from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch +from freezegun import freeze_time from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from typing_extensions import Generator from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp @@ -22,7 +23,12 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, ) -from homeassistant.core import DOMAIN as HA_DOMAIN, CoreState, HomeAssistant, callback +from homeassistant.core 
import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + CoreState, + HomeAssistant, + callback, +) from homeassistant.data_entry_flow import BaseServiceInfo, FlowResult, FlowResultType from homeassistant.exceptions import ( ConfigEntryAuthFailed, @@ -46,6 +52,7 @@ from .common import ( async_capture_events, async_fire_time_changed, async_get_persistent_notifications, + flush_store, mock_config_flow, mock_integration, mock_platform, @@ -526,13 +533,13 @@ async def test_remove_entry_cancels_reauth( assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR issue_id = f"config_entry_reauth_test_{entry.entry_id}" - assert issue_registry.async_get_issue(HA_DOMAIN, issue_id) + assert issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) await manager.async_remove(entry.entry_id) flows = hass.config_entries.flow.async_progress_by_handler("test") assert len(flows) == 0 - assert not issue_registry.async_get_issue(HA_DOMAIN, issue_id) + assert not issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) async def test_remove_entry_handles_callback_error( @@ -907,6 +914,7 @@ async def test_saving_and_loading( assert orig.as_dict() == loaded.as_dict() +@freeze_time("2024-02-14 12:00:00") async def test_as_dict(snapshot: SnapshotAssertion) -> None: """Test ConfigEntry.as_dict.""" @@ -1189,14 +1197,14 @@ async def test_reauth_issue( assert len(issue_registry.issues) == 1 issue_id = f"config_entry_reauth_test_{entry.entry_id}" - issue = issue_registry.async_get_issue(HA_DOMAIN, issue_id) + issue = issue_registry.async_get_issue(HOMEASSISTANT_DOMAIN, issue_id) assert issue == ir.IssueEntry( active=True, breaks_in_ha_version=None, created=ANY, data={"flow_id": flows[0]["flow_id"]}, dismissed_version=None, - domain=HA_DOMAIN, + domain=HOMEASSISTANT_DOMAIN, is_fixable=False, is_persistent=False, issue_domain="test", @@ -1246,8 +1254,11 @@ async def test_loading_default_config(hass: HomeAssistant) -> None: assert len(manager.async_entries()) == 0 -async def test_updating_entry_data(manager: config_entries.ConfigEntries) -> None: +async def test_updating_entry_data( + manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory +) -> None: """Test that we can update an entry data.""" + created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1255,17 +1266,32 @@ async def test_updating_entry_data(manager: config_entries.ConfigEntries) -> Non ) entry.add_to_manager(manager) + assert len(manager.async_entries()) == 1 + assert manager.async_entries()[0] == entry + assert entry.created_at == created + assert entry.modified_at == created + + freezer.tick() + assert manager.async_update_entry(entry) is False assert entry.data == {"first": True} + assert entry.modified_at == created + assert manager.async_entries()[0].modified_at == created + + freezer.tick() + modified = dt_util.utcnow() assert manager.async_update_entry(entry, data={"second": True}) is True assert entry.data == {"second": True} + assert entry.modified_at == modified + assert manager.async_entries()[0].modified_at == modified async def test_updating_entry_system_options( - manager: config_entries.ConfigEntries, + manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory ) -> None: """Test that we can update an entry data.""" + created = dt_util.utcnow() entry = MockConfigEntry( domain="test", data={"first": True}, @@ -1276,6 +1302,11 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is True assert entry.pref_disable_polling is False + assert 
entry.created_at == created + assert entry.modified_at == created + + freezer.tick() + modified = dt_util.utcnow() manager.async_update_entry( entry, pref_disable_new_entities=False, pref_disable_polling=True @@ -1283,6 +1314,8 @@ async def test_updating_entry_system_options( assert entry.pref_disable_new_entities is False assert entry.pref_disable_polling is True + assert entry.created_at == created + assert entry.modified_at == modified async def test_update_entry_options_and_trigger_listener( @@ -5098,7 +5131,7 @@ async def test_hashable_non_string_unique_id( { "type": data_entry_flow.FlowResultType.ABORT, "reason": "single_instance_allowed", - "translation_domain": HA_DOMAIN, + "translation_domain": HOMEASSISTANT_DOMAIN, }, ), ], @@ -5296,7 +5329,7 @@ async def test_avoid_adding_second_config_entry_on_single_config_entry( ) assert result["type"] == data_entry_flow.FlowResultType.ABORT assert result["reason"] == "single_instance_allowed" - assert result["translation_domain"] == HA_DOMAIN + assert result["translation_domain"] == HOMEASSISTANT_DOMAIN async def test_in_progress_get_canceled_when_entry_is_created( @@ -5903,3 +5936,67 @@ async def test_config_entry_late_platform_setup( "entry_id test2 cannot forward setup for light because it is " "not loaded in the ConfigEntryState.NOT_LOADED state" ) not in caplog.text + + +@pytest.mark.parametrize("load_registries", [False]) +async def test_migration_from_1_2( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test migration from version 1.2.""" + hass_storage[config_entries.STORAGE_KEY] = { + "version": 1, + "minor_version": 2, + "data": { + "entries": [ + { + "data": {}, + "disabled_by": None, + "domain": "sun", + "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", + "minor_version": 1, + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "import", + "title": "Sun", + "unique_id": None, + "version": 1, + }, + ] + }, + } + + manager = config_entries.ConfigEntries(hass, {}) + await manager.async_initialize() + + # Test data was loaded + entries = manager.async_entries() + assert len(entries) == 1 + + # Check we store migrated data + await flush_store(manager._store) + assert hass_storage[config_entries.STORAGE_KEY] == { + "version": config_entries.STORAGE_VERSION, + "minor_version": config_entries.STORAGE_VERSION_MINOR, + "key": config_entries.STORAGE_KEY, + "data": { + "entries": [ + { + "created_at": "1970-01-01T00:00:00+00:00", + "data": {}, + "disabled_by": None, + "domain": "sun", + "entry_id": "0a8bd02d0d58c7debf5daf7941c9afe2", + "minor_version": 1, + "modified_at": "1970-01-01T00:00:00+00:00", + "options": {}, + "pref_disable_new_entities": False, + "pref_disable_polling": False, + "source": "import", + "title": "Sun", + "unique_id": None, + "version": 1, + }, + ] + }, + } diff --git a/tests/test_const.py b/tests/test_const.py index a6a2387b091..64ccb875cf5 100644 --- a/tests/test_const.py +++ b/tests/test_const.py @@ -15,7 +15,7 @@ from .common import ( def _create_tuples( - value: Enum | list[Enum], constant_prefix: str + value: type[Enum] | list[Enum], constant_prefix: str ) -> list[tuple[Enum, str]]: return [(enum, constant_prefix) for enum in value] diff --git a/tests/test_core.py b/tests/test_core.py index 5e6b51cc39e..8035236fd08 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -9,6 +9,7 @@ import functools import gc import logging import os +from pathlib import Path import re from tempfile import TemporaryDirectory import threading @@ -920,6 +921,14 @@ 
def test_event_repr() -> None: ) +def test_event_origin_idx() -> None: + """Test the EventOrigin idx.""" + assert ha.EventOrigin.remote is ha.EventOrigin.remote + assert ha.EventOrigin.local is ha.EventOrigin.local + assert ha.EventOrigin.local.idx == 0 + assert ha.EventOrigin.remote.idx == 1 + + def test_event_as_dict() -> None: """Test an Event as dictionary.""" event_type = "some_type" @@ -1620,7 +1629,7 @@ async def test_serviceregistry_call_non_existing_with_blocking( hass: HomeAssistant, ) -> None: """Test non-existing with blocking.""" - with pytest.raises(ha.ServiceNotFound): + with pytest.raises(ServiceNotFound): await hass.services.async_call("test_domain", "i_do_not_exist", blocking=True) @@ -1706,7 +1715,7 @@ async def test_serviceregistry_service_that_not_exists(hass: HomeAssistant) -> N assert exc.value.domain == "test_do_not" assert exc.value.service == "exist" - assert str(exc.value) == "Service test_do_not.exist not found" + assert str(exc.value) == "Action test_do_not.exist not found" async def test_serviceregistry_async_service_raise_exception( @@ -1797,7 +1806,7 @@ async def test_services_call_return_response_requires_blocking( return_response=True, ) assert str(exc.value) == ( - "A non blocking service call with argument blocking=False " + "A non blocking action call with argument blocking=False " "can't be used together with argument return_response=True" ) @@ -1843,7 +1852,7 @@ async def test_serviceregistry_return_response_invalid( ("supports_response", "return_response", "expected_error"), [ (SupportsResponse.NONE, True, "does not return responses"), - (SupportsResponse.ONLY, False, "call requires responses"), + (SupportsResponse.ONLY, False, "action requires responses"), ], ) async def test_serviceregistry_return_response_arguments( @@ -2001,8 +2010,9 @@ async def test_config_is_allowed_path() -> None: config.allowlist_external_dirs = {os.path.realpath(tmp_dir)} test_file = os.path.join(tmp_dir, "test.jpg") - with open(test_file, "w", encoding="utf8") as tmp_file: - tmp_file.write("test") + await asyncio.get_running_loop().run_in_executor( + None, Path(test_file).write_text, "test" + ) valid = [test_file, tmp_dir, os.path.join(tmp_dir, "notfound321")] for path in valid: @@ -2519,14 +2529,14 @@ async def test_reserving_states(hass: HomeAssistant) -> None: hass.states.async_set("light.bedroom", "on") assert hass.states.async_available("light.bedroom") is False - with pytest.raises(ha.HomeAssistantError): + with pytest.raises(HomeAssistantError): hass.states.async_reserve("light.bedroom") hass.states.async_remove("light.bedroom") assert hass.states.async_available("light.bedroom") is True hass.states.async_set("light.bedroom", "on") - with pytest.raises(ha.HomeAssistantError): + with pytest.raises(HomeAssistantError): hass.states.async_reserve("light.bedroom") assert hass.states.async_available("light.bedroom") is False @@ -2830,7 +2840,7 @@ async def test_state_change_events_context_id_match_state_time( hass: HomeAssistant, ) -> None: """Test last_updated, timed_fired, and the ulid all have the same time.""" - events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) + events = async_capture_events(hass, EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -2849,7 +2859,7 @@ async def test_state_change_events_match_time_with_limits_of_precision( a bit better than the precision of datetime.now() which is used for last_updated on some platforms. 
""" - events = async_capture_events(hass, ha.EVENT_STATE_CHANGED) + events = async_capture_events(hass, EVENT_STATE_CHANGED) hass.states.async_set("light.bedroom", "on") await hass.async_block_till_done() state: State = hass.states.get("light.bedroom") @@ -3221,7 +3231,7 @@ async def test_async_add_import_executor_job(hass: HomeAssistant) -> None: evt = threading.Event() loop = asyncio.get_running_loop() - def executor_func() -> None: + def executor_func() -> threading.Event: evt.set() return evt diff --git a/tests/test_requirements.py b/tests/test_requirements.py index 161214160aa..2885fa30036 100644 --- a/tests/test_requirements.py +++ b/tests/test_requirements.py @@ -602,12 +602,12 @@ async def test_discovery_requirements_ssdp(hass: HomeAssistant) -> None: ) as mock_process: await async_get_integration_with_requirements(hass, "ssdp_comp") - assert len(mock_process.mock_calls) == 3 + assert len(mock_process.mock_calls) == 2 assert mock_process.mock_calls[0][1][1] == ssdp.requirements assert { + mock_process.mock_calls[0][1][0], mock_process.mock_calls[1][1][0], - mock_process.mock_calls[2][1][0], - } == {"network", "recorder"} + } == {"network", "ssdp"} @pytest.mark.parametrize( @@ -631,7 +631,7 @@ async def test_discovery_requirements_zeroconf( ) as mock_process: await async_get_integration_with_requirements(hass, "comp") - assert len(mock_process.mock_calls) == 3 + assert len(mock_process.mock_calls) == 2 assert mock_process.mock_calls[0][1][1] == zeroconf.requirements diff --git a/tests/test_runner.py b/tests/test_runner.py index 90678454adf..141af4f4bc7 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -2,6 +2,7 @@ import asyncio from collections.abc import Iterator +import subprocess import threading from unittest.mock import patch @@ -169,21 +170,21 @@ def test_enable_posix_spawn() -> None: yield from packaging.tags.parse_tag("cp311-cp311-musllinux_1_1_x86_64") with ( - patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), + patch.object(subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", side_effect=_mock_sys_tags_musl, ), ): runner._enable_posix_spawn() - assert runner.subprocess._USE_POSIX_SPAWN is True + assert subprocess._USE_POSIX_SPAWN is True with ( - patch.object(runner.subprocess, "_USE_POSIX_SPAWN", False), + patch.object(subprocess, "_USE_POSIX_SPAWN", False), patch( "homeassistant.runner.packaging.tags.sys_tags", side_effect=_mock_sys_tags_any, ), ): runner._enable_posix_spawn() - assert runner.subprocess._USE_POSIX_SPAWN is False + assert subprocess._USE_POSIX_SPAWN is False diff --git a/tests/test_setup.py b/tests/test_setup.py index 1e19f1a7b76..4e7c23865da 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -10,13 +10,19 @@ import voluptuous as vol from homeassistant import config_entries, loader, setup from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START -from homeassistant.core import CoreState, HomeAssistant, callback +from homeassistant.core import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + CoreState, + HomeAssistant, + callback, +) from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import config_validation as cv, discovery, translation from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) +from homeassistant.helpers.issue_registry import IssueRegistry from .common import ( MockConfigEntry, @@ -236,9 +242,26 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: 
hass.config.components.remove("platform_conf") -async def test_component_not_found(hass: HomeAssistant) -> None: - """setup_component should not crash if component doesn't exist.""" +async def test_component_not_found( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should raise a repair issue if component doesn't exist.""" assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, "integration_not_found.non_existing" + ) + assert issue + assert issue.translation_key == "integration_not_found" + + +async def test_component_missing_not_raising_in_safe_mode( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should not raise an issue if component doesn't exist in safe.""" + hass.config.safe_mode = True + assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 0 async def test_component_not_double_initialized(hass: HomeAssistant) -> None: diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index b4b8cfa4b6d..d0bd7fbeb2f 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -1,6 +1,7 @@ """Aiohttp test utils.""" import asyncio +from collections.abc import Iterator from contextlib import contextmanager from http import HTTPStatus import re @@ -296,7 +297,7 @@ class AiohttpClientMockResponse: @contextmanager -def mock_aiohttp_client(): +def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: """Context manager to mock aiohttp client.""" mocker = AiohttpClientMocker() diff --git a/tests/util/test_color.py b/tests/util/test_color.py index 53c243a1e4f..c8a5e0c8587 100644 --- a/tests/util/test_color.py +++ b/tests/util/test_color.py @@ -200,17 +200,17 @@ def test_color_hs_to_xy() -> None: def test_rgb_hex_to_rgb_list() -> None: """Test rgb_hex_to_rgb_list.""" - assert [255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffff") + assert color_util.rgb_hex_to_rgb_list("ffffff") == [255, 255, 255] - assert [0, 0, 0] == color_util.rgb_hex_to_rgb_list("000000") + assert color_util.rgb_hex_to_rgb_list("000000") == [0, 0, 0] - assert [255, 255, 255, 255] == color_util.rgb_hex_to_rgb_list("ffffffff") + assert color_util.rgb_hex_to_rgb_list("ffffffff") == [255, 255, 255, 255] - assert [0, 0, 0, 0] == color_util.rgb_hex_to_rgb_list("00000000") + assert color_util.rgb_hex_to_rgb_list("00000000") == [0, 0, 0, 0] - assert [51, 153, 255] == color_util.rgb_hex_to_rgb_list("3399ff") + assert color_util.rgb_hex_to_rgb_list("3399ff") == [51, 153, 255] - assert [51, 153, 255, 0] == color_util.rgb_hex_to_rgb_list("3399ff00") + assert color_util.rgb_hex_to_rgb_list("3399ff00") == [51, 153, 255, 0] def test_color_name_to_rgb_valid_name() -> None: diff --git a/tests/util/test_dt.py b/tests/util/test_dt.py index 6caca092517..0e8432bbb83 100644 --- a/tests/util/test_dt.py +++ b/tests/util/test_dt.py @@ -294,12 +294,12 @@ def test_parse_time_expression() -> None: assert list(range(0, 60, 5)) == dt_util.parse_time_expression("/5", 0, 59) - assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59) + assert dt_util.parse_time_expression([2, 1, 3], 0, 59) == [1, 2, 3] assert list(range(24)) == dt_util.parse_time_expression("*", 0, 23) - assert [42] == dt_util.parse_time_expression(42, 0, 59) - assert [42] == dt_util.parse_time_expression("42", 0, 59) + assert dt_util.parse_time_expression(42, 0, 59) == [42] + assert 
dt_util.parse_time_expression("42", 0, 59) == [42] with pytest.raises(ValueError): dt_util.parse_time_expression(61, 0, 60) diff --git a/tests/util/test_json.py b/tests/util/test_json.py index 3a314bb5a1b..05dab46002d 100644 --- a/tests/util/test_json.py +++ b/tests/util/test_json.py @@ -131,34 +131,6 @@ def test_json_loads_object() -> None: json_loads_object("null") -async def test_deprecated_test_find_unserializable_data( - caplog: pytest.LogCaptureFixture, -) -> None: - """Test deprecated test_find_unserializable_data logs a warning.""" - # pylint: disable-next=hass-deprecated-import,import-outside-toplevel - from homeassistant.util.json import find_paths_unserializable_data - - find_paths_unserializable_data(1) - assert ( - "uses find_paths_unserializable_data from homeassistant.util.json" - in caplog.text - ) - assert "should be updated to use homeassistant.helpers.json module" in caplog.text - - -async def test_deprecated_save_json( - caplog: pytest.LogCaptureFixture, tmp_path: Path -) -> None: - """Test deprecated save_json logs a warning.""" - # pylint: disable-next=hass-deprecated-import,import-outside-toplevel - from homeassistant.util.json import save_json - - fname = tmp_path / "test1.json" - save_json(fname, TEST_JSON_A) - assert "uses save_json from homeassistant.util.json" in caplog.text - assert "should be updated to use homeassistant.helpers.json module" in caplog.text - - async def test_loading_derived_class() -> None: """Test loading data from classes derived from str.""" diff --git a/tests/util/test_loop.py b/tests/util/test_loop.py index f4846d98898..3ff7128938f 100644 --- a/tests/util/test_loop.py +++ b/tests/util/test_loop.py @@ -18,7 +18,7 @@ def banned_function(): @contextlib.contextmanager -def patch_get_current_frame(stack: list[Mock]) -> Generator[None, None, None]: +def patch_get_current_frame(stack: list[Mock]) -> Generator[None]: """Patch get_current_frame.""" frames = extract_stack_to_frame(stack) with ( diff --git a/tests/util/test_process.py b/tests/util/test_process.py index ae28f5d82fc..999abe0476f 100644 --- a/tests/util/test_process.py +++ b/tests/util/test_process.py @@ -1,20 +1,25 @@ """Test process util.""" +from functools import partial import os import subprocess import pytest +from homeassistant.core import HomeAssistant from homeassistant.util import process -async def test_kill_process() -> None: +async def test_kill_process(hass: HomeAssistant) -> None: """Test killing a process.""" - sleeper = subprocess.Popen( - "sleep 1000", - shell=True, # noqa: S602 # shell by design - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, + sleeper = await hass.async_add_executor_job( + partial( # noqa: S604 # shell by design + subprocess.Popen, + "sleep 1000", + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) ) pid = sleeper.pid diff --git a/tests/util/test_timeout.py b/tests/util/test_timeout.py index 797c849db3c..1c4b06d99b4 100644 --- a/tests/util/test_timeout.py +++ b/tests/util/test_timeout.py @@ -25,7 +25,7 @@ async def test_simple_global_timeout_with_executor_job(hass: HomeAssistant) -> N with pytest.raises(TimeoutError): async with timeout.async_timeout(0.1): - await hass.async_add_executor_job(lambda: time.sleep(0.2)) + await hass.async_add_executor_job(time.sleep, 0.2) async def test_simple_global_timeout_freeze() -> None: @@ -133,7 +133,7 @@ async def test_mix_global_timeout_freeze_and_zone_freeze_inside_executor_job_sec async with timeout.async_timeout(0.1): async with timeout.async_timeout(0.2, 
zone_name="recorder"): await hass.async_add_executor_job(_some_sync_work) - await hass.async_add_executor_job(lambda: time.sleep(0.2)) + await hass.async_add_executor_job(time.sleep, 0.2) async def test_simple_global_timeout_freeze_with_executor_job( @@ -143,7 +143,7 @@ async def test_simple_global_timeout_freeze_with_executor_job( timeout = TimeoutManager() async with timeout.async_timeout(0.2), timeout.async_freeze(): - await hass.async_add_executor_job(lambda: time.sleep(0.3)) + await hass.async_add_executor_job(time.sleep, 0.3) async def test_simple_global_timeout_freeze_reset() -> None: @@ -338,3 +338,24 @@ async def test_simple_zone_timeout_zone_with_timeout_exeption() -> None: raise RuntimeError await asyncio.sleep(0.3) + + +async def test_multiple_global_freezes(hass: HomeAssistant) -> None: + """Test multiple global freezes.""" + timeout = TimeoutManager() + + async def background(delay: float) -> None: + async with timeout.async_freeze(): + await asyncio.sleep(delay) + + async with timeout.async_timeout(0.1): + task = hass.async_create_task(background(0.2)) + async with timeout.async_freeze(): + await asyncio.sleep(0.1) + await task + + async with timeout.async_timeout(0.1): + task = hass.async_create_task(background(0.2)) + async with timeout.async_freeze(): + await asyncio.sleep(0.3) + await task diff --git a/tests/util/test_unit_system.py b/tests/util/test_unit_system.py index 033631563f4..15500777212 100644 --- a/tests/util/test_unit_system.py +++ b/tests/util/test_unit_system.py @@ -15,6 +15,7 @@ from homeassistant.const import ( WIND_SPEED, UnitOfLength, UnitOfMass, + UnitOfPrecipitationDepth, UnitOfPressure, UnitOfSpeed, UnitOfTemperature, @@ -42,7 +43,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -55,7 +56,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=INVALID_UNIT, mass=UnitOfMass.GRAMS, @@ -68,7 +69,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -81,7 +82,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, @@ -94,7 +95,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=INVALID_UNIT, @@ -107,7 +108,7 @@ def test_invalid_units() -> None: with pytest.raises(ValueError): UnitSystem( SYSTEM_NAME, - accumulated_precipitation=UnitOfLength.MILLIMETERS, + accumulated_precipitation=UnitOfPrecipitationDepth.MILLIMETERS, conversions={}, length=UnitOfLength.METERS, mass=UnitOfMass.GRAMS, diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 
6ea3f1437af..ece65504ed6 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -1,5 +1,6 @@ """Test Home Assistant yaml loader.""" +from collections.abc import Generator import importlib import io import os @@ -9,7 +10,6 @@ import unittest from unittest.mock import Mock, patch import pytest -from typing_extensions import Generator import voluptuous as vol import yaml as pyyaml @@ -566,8 +566,8 @@ def test_no_recursive_secrets() -> None: def test_input_class() -> None: """Test input class.""" - yaml_input = yaml_loader.Input("hello") - yaml_input2 = yaml_loader.Input("hello") + yaml_input = yaml.Input("hello") + yaml_input2 = yaml.Input("hello") assert yaml_input.name == "hello" assert yaml_input == yaml_input2
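
For reference, the timestamp behaviour exercised by test_updating_entry_data and test_updating_entry_system_options above can be reduced to the following sketch. It is an illustration, not part of the change set: it assumes the `manager` (config_entries.ConfigEntries) and `freezer` (FrozenDateTimeFactory) pytest fixtures and the MockConfigEntry helper that the tests in tests/test_config_entries.py already use, and the hypothetical test name is mine.

# Minimal sketch (not in the diff above): created_at stays fixed while
# modified_at only moves when async_update_entry actually changes the entry.
from freezegun.api import FrozenDateTimeFactory

from homeassistant import config_entries
from homeassistant.util import dt as dt_util

from tests.common import MockConfigEntry


async def test_entry_timestamps_sketch(
    manager: config_entries.ConfigEntries, freezer: FrozenDateTimeFactory
) -> None:
    """Sketch of the created_at / modified_at contract checked above."""
    created = dt_util.utcnow()
    entry = MockConfigEntry(domain="test", data={"first": True})
    entry.add_to_manager(manager)
    assert entry.created_at == created
    assert entry.modified_at == created

    freezer.tick()  # advance the frozen clock
    # A no-op update returns False and does not bump modified_at.
    assert manager.async_update_entry(entry) is False
    assert entry.modified_at == created

    freezer.tick()
    modified = dt_util.utcnow()
    # A real data change returns True, bumps modified_at, keeps created_at.
    assert manager.async_update_entry(entry, data={"second": True}) is True
    assert entry.created_at == created
    assert entry.modified_at == modified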